var/home/core/zuul-output/
var/home/core/zuul-output/logs/
var/home/core/zuul-output/logs/kubelet.log.gz
db9WTU/,dPgF({n R5xldb̠R욵IkB(}oF#v. Ttl<+MH}1[Zm\U&1:@FeP|`QBąAo%CJ$JEdFS1P>b1w g \%f0A 9ɐPr#VH܁m CKEU*YȩՏoTG}^ qG;ێjP$ `M0%waEUKqSеfME4=R\-?bj"! 5y]%@_!8.#(v@]IPAa%@rFE Y)֎ a<  R(V%E[L4U3R|Zr@0#PXG%ܲjsPqab2<|yµG],ZC 6(- VAP4u(A6ךbdzlx]HO 4%鰀b*ӠlCi#8 8%oC)zЭ4E<A4TZ(FSc6\Wӥ11D 0r";@jLBER!6/SJ,;;)Z-, J+eo`j?&lzFo_(TxtR W[6d!ˌ G 3ijyGT46iץ3{7O`Oj'YGT&Y3x0N ^Q;'ГtJ@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $Nr*  x#8xN %7.>I'Q^@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N" HN t ģ4Cu+ >'N '8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N q=a'.Crp  ;Xq=E'P~U'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qy@Z;(V~|[M |]{og}zG@RB xHƥzpKkѣ7.%4%cӗJiՃ;tu?n\h]=芄z MCWk;ZRPz"]ծ O+x gotIdooz &GWeЫ _*Ì xEmeѿnco^͢#KwffwlI4=o&8\MMpgzp܌Smxe\XUj$Zo|Az^ݺ 3՚R|̇J *o_ڻ/K3+t~Cf4+R][.jyz־-Rkg<Ńm36/WZAep6AM}gemJ}X<\]аt۪ UJiYi46DRSՆ_{+|VLH㝛UKϏf$WyrKOn n&U5UN`YDL0*Ijܢ-d]|,//Fc*.[m6ooR}1m\Nw[ޖn6*;HGzĸȈĨ)*# &c&XP1Qz2sYhd )3 j-$&%Y\%^Fґ0b(ɌٚdׂSAlq$j:ltIAd@1)n yP! %ǘhaBg "*LĒ('8F@"OJiLf<֜xX0}Hx*>EDq".jgEB1[s#\4יJEI.4d/zb񷽬ZBݽK/ba5% ]Wnje%%E,9ޟexAWB:N|T5($<=<ާ,{zߦop}Ь>O2Rh &%.`OO Zcڳ/My;}u-zp՚u ,W%fhOD}o8$ Fi%*L9w KXF3JI,TtC2ٚs`nA/C7ޠs:77 ]C+|9O]hY\\k۫H|m=5w|eM.-eD3wY!W@@7V >ƌSR Q=1)lR=>h}2(0j;TSkMs5g=h}AԨtJ1($%#x(h%Q?cYS M8R@^(`E9o`y p#"F1z㜳 s !9R^G:fB!qLbTMiH'/FҁQ&,KR* FQ1֥ti;NOSTָ4Q^~1zM34y.} /U[T/-* N~{xgAHq"{v>f)b{w4d{TY)"g"IcLY+;=+gWga(FәNP&Kd2mi./o߆C#!YPW1}{zc5K׿ kSHuGھ]ՔWyvu_SDlT*hiMGal7Xgjշ)FSu|6h>xN̶4F"2+i8Š[W}6IOƿ/ހ-;w2Zjµ2֚^U[5RiZͺa< z0b'Uëlqލumn}VБjeZa ~L]*z^$lM%?PЎB?†r `8~>|ÿ~~Ӈ߿|xkq=0 &p&yY?U5Է*v@;Ot:݇ҕ7!r J~z= Kn OZpZi9!-@ͯ2rU~UZ|!O 1GA9J_lmeG q'& Hp!X2V aXap-F =ɶ'mX`^Q_8p9I%OEY*p9DeOc ÄnQ'ˢNfSɆfGeCn.v><rYQ2.4J&^5_s'2uJK"ӝN M[xU,K,O$fbBSsɋ9.#S M<}:' e~tGYJ. 3!|#RD|DA3XR?m t'%`9CDmI}~v.6S{"s"3A&ٖt/Jm,ۋKW{6sM_f(m EjQe>(:pp]g;BwE V*UaD VhQy$hqcO |p[SAdLǜ !oց)(C,{I"jdĊSY }S(sVA-m=3{9A sj~*)1@~\fvZC/ y bDFC\$e*LQrBcJCclB1Q1m%C,ȓqdKm3#t6F[%!)(Z&1 X[mR+PIl^u'nR=  kԽHQnh7nLFfK9@<9psEL//ƭH(!LAcAS@pwQ͜F2P@`CȉǞR89c;;g:n}lo~:h4r=hGsmrO<ޓ. z:i\m;+hn|&5G?-$Z:ky9fYsަĔWf-GSaUĽm'/\#9W/M-8l;{no/O$BHa39e#b K3c4Zr{Jw<,<uL{(9m5N녈DioUS5,bV ñ Le<8:Xxځ^KEqRklȓh{-Njf'UkRܝ@)m|OsI;ʸ X|MH̰J(," @HVnWK/yB$d Wi1TydHpy}J$h 6GM^ӴCH!RNiyt1m?9Y^oC]9_l9d/?\tvJ=S2fB;E8q"o SaUT6b,0!Jfvh5Z@DDȉ;tۨQ1,%! jC&gk~ʳ?N&v\ڋ[dOST厸,'g,03nfWGsGC &(@@~:ôBwGI=E8d-c6xs*H {$y7.6 ڑUIG駟OHf$|&'s,ȋ9&5WrMj&~ R ssñJ<170(EsuzQQR8ŅfS"` uaE$XLjAuACNa!FxH/[QհVw{HdGzla Ayå O=KcZ1b"i+ZI}kX %ma*e-'#w[ /.|5Mi0?P%z߆N{7i{r<8nvKmvwo=</Z \Xyanc~ u}%Ztظ][S#9+~8q@T*E<وؗеۋ1 'U`h| lCсݸRJRK7{ vKfU6Mz-Yz6wiB/::A doP eٙ# 9T)$UpJ8'&fum#; // ^TaNs.22@ jJ Ҳ\Ha&lR}*nw^2⟍50V -r1oZl.Vz .;޾ّ}cXw_zėoMX06 ( H#$6Ed :{  Ա+^BH#=XL/QBNIQ:YjA0S|hs9)Ƣ9'6,uHч$@ZrZɗAa3r 5rq>\feh_6g?ƤOOmu}7$š&j3S2omhb2*}Wma8^9!r-iTW 9)M b)mll~l.7nzi.$[^s=?ȞͯnN(7-x-LK,,JK͞YD ghm;5;Y kRoD)8!R vR` %o:63r6U&:cYeXJ06lZ^j SXMv(.ڪ֕ϊ@@. 5EPsIzDM)A"R%3ݻBIp/gO ܱ^54C5\_㬼\IrJPJ0IP[5t( j+KXyOV*I j`ZT͘KCi+ {͌xvRL݉c"4W.l:aj1~C&)ozu䧝R6wrYi;!}6E].;| 2v(1ڻLK-oR[;5rCS޸<e3kxlN$QЀVIJbA+M]wI{ihB6\1ޢdژS6&Eg QE֪Tk !"I D&n(.y" Op2&)JʙHŤ:r]y?'64a`?پbm\!*, ЖHBb"xJ W^ CԆVR"i@r'OAk,Æ% ]6]f|2DVͧt²V,>-ה dl@ &(QBl'bZvUp2U+3̙w1O`^5aQߋ?; N 4ԀE+59$X޷C'y^29 v|w' 1v@Jaɘ5! 
*8O&3,D!vy|;CO+˭5/ W4Ö@BȐ|XG1`2%bX(0>rBF-felXRoK<r䓭aYfςQ*Pu=TqS)֭ukE{R$"|)XG%V6m-U^M~mȼUԳNؿ{߿m"K)g1d[q)JI>HJd RP$Zhr,*Z U HB{!F/ ٲ8|<W'>9=_ 3YDujL c[[*,{UM|O4fQ5f#,MGpfG> 5gRH]0ҒZ qɢR#?0r4bxvG]쁹D6|"Qel5!5 ra8-X9(JssrVq| .|D;''Wg =4P󊬜9MVwqGie׉ϼ|ˏ70M05݃5Љl5S{ϳ-{o9jw3KD6TbrGy'9*'0tve76yx,U|<2Mó՚zX|zwl +f2R;6>;l'yp^Um#98*goU 4Nqx/>~|Q*}|g*cL¿DT m/0yM+>_qdv:Y;%"jk \0kh褳e)THƉ3z Lɡ$_]_K x5ԏL,בfЋuqUEeM|`/1&;/jIGt~!x3aμZo+WbM AqώCݻnJR^~9nMM<-pE..m 5=6B y/u4_:gQ2ӠPEssE-`LHE- $*FxdTESY$MtQRvXdP$B&(=xtFP`gl3O<.\%b^ϝMGy$`ݗA@H[_M;K9?jʻ=Ik Z 0Yi[Bi.MD`= GnD@آ+D%FQAYD*Z*ꎝ⣢ K2Yiu`PZ#c3qv#v;y'F$bREV̘ v֖&W+Rd>;ބPhe5RoH Ҫ¦.Y)O'>gH[g7b)mǂfǁD[^7`f|]JVzVVgg2zBad3JY)mϻ.F[ȹ~![щa,ؖ ek`U ĢgBc1(5GYNחl! 0;pvh)GF_ԗ"QED=)M8)Eb&Y\J))f'1{uΰcPrdFBSL=WⒻ;nKwlyL(>( &Eأh <54$ E:SSQ5O9]~| Bψ< yVlK] (5_QD}BT|t @ˆ4 %8~,o+hqlWW]?m1]MOT| \c@n:mh`n;7wy=j\Nv?+X6|d1t`Z=wD;6.{4Ef%g˖/Z]|iK7Hr9r eEh_`BАI $fB2EΊ Byg$<%yb8?sC3fͽ?w*c`%kBo1{VLRC.7xHD`dbEWNZ#5:SU9fsДHGUŵ#wCt}3s=W<2F㹖WEC r 2y?9Ϩ|]]d/~M=cUzh/|aCq—;:}y]_Y OfazϿ{EUäT&1_^YY-^{=[Cyn7ˀЇJc=4tvlpk1a*ԉﭙhw\&,.~ɢw;YV_zQEl:_;oyةDn=ͺޭ{ӄgl-qy6Yҁx_!ڇ~ #r_f]q]UNFg%vrRL:[юc%1ɑf?Y( ILEXT+rΰM:H~C=c=F8+}R-bϖuQЭ>%eP>gMg>Nt9CșI4a5ң6bXd1XȀA4%b f/>Ȑ5ޤe,sJdu`*I,L|/zz<;?,7^~ɋv̼ߦX+ƍgsZLs˒W_K0u}ڌ|یL:OKO(Ͱx}gڢb1yv6ھ3{؜M83AF2/ *JK5lxB6ptE2$`tM)p%Ț# D1cBx ̜E6@EojMGaYУak49Ȳ8$U6iw;=o|fNb-'* %ցd":E*S.RDP_` erT*pZg1"$RPŘ BSDRXcy9 sHQty6IVuIZ- "2|rt H*:&|Ól%u)8ǽٜ8}=f{H˃g̔$i-}Akty aٮfAUǢ|ͦȡ+*!>3(mD dJ5(O2.RPT Lc5 cMƺclX;Gׅ5Rܤ뛬U}$pID?Y9$&lꥬ^ FHapYZrdjh({|n)ẓV\OU?Y21KҕZ,0ELfz9*mkX5UoVESje A#JDB/j ٱdޓHQV,qlL횉G;tL>Ђ m"Uk'e{ O?l*3"y|3Ǒ=;BƎ$|_$^i4ML5m Aw98kA<9(XWsUu Q1} ICHw\̥<^=:َa*h6x.yll m<ݷ+v6cܤ %z@yR1&?3?~y{zwT_;R7>֘MV80ڴ%IsEZoS'ƦER-:u))3Q*i} 'Y^ѩX\E:4$0@CNeJ4AvYB E=S,b]ƑSN58^.jH/gl9i(g- ˜ j¬':]4P4!x!ޣӅF@58^^pwc= fݺ,Lo9l,o~zDAFn^g_0\ h6;^'t B9-{C#hQH ]q_J%֪T Ѡ "Qn&]7cf G 3R-&pƟDpNKR]d$@bOURԠ5-(= Q(CQ%{{%˧ &OWXM{h<}@>~iH@a@(mW48$ɋyκw*ڢJ{wX Jxݢuon 7=4nQ{Ffiouww-cγE_pMѕGwtL2|{c_570pܾřۛ K)94-laDh hsv: ݥ+N"IQҎ cІ)`ҟ"jμ =bJoByL!zkJFARd0 -bYg5VWk8n}IAvIS̷), x``%~@GKb}bY5襖̺p;" .c uj"ppnS [\XLXh"a6D*nrki>W]1kiȪeX:nB dOD0 4z4tE)b{퐎MϞ}[o듉.c[e/n3ߚ{9/T$s65<2e}fiM92P_UևۂtWsyX"v}ڭ;Q\hn\l{BL:/lxQ^^ZiaoQ QYR6mV[۬7PJsf}MW5vg &:mqM4FL%iN*Cpk[hWr{w%vsZ|Q'uh'Su=H$Ga$=zPz}AXZfQ ew\W4|gC>HhUFTx=Tg;nv1ʺ>Y)fB 㙅M6ld+(E9%kkF/۬"o8^PЫE7D`4By$FBl&9DǵB`ITW)lX<[+RRW k J &pL 1ZoXrGb(ڞH=#M7uUnQJ_oK]*=~E ~`?? 
MܟUZo:A<p~w j1ňiql\^htG{d/|3"@TR UtpijxI6y "Aiv5< ph~trp:Ztⷛ5'gT5I+z:j`V_<{KkգMi8#MimpHv_fW>vޝOIeZ,Nq ?V빽$O?O;'{[ؒK{RNu#v#]MaDJi|`ŲGn +N(^{ tf~ֶ5O'Pn}ަi{|p:$__T(⦆df7g0<;"q ?|_?p᧏h 4AيZ6=_5Z|?y[t-ڠ৞OzUHy-SWKn@R6wc0lgpOp^5"Uz\|Mj~2;m:l0E71*G'A Gn* |ߗktSHꚯH%syIrb1hD2BtChW5хDS;۞۰S?w dMohuHcv뇾 ͫ_+Xg!6aDB\FIL# ,61DѰL?C"l{18Z璨+cY" xd d {]I3T"2PJ/sA\#!!ZGbgEFɦuGsE#^n̵+OTzrtv)Ki7-I)QW$li8h@ UX,P8WkKt̼@h8ɱLmTt &Ί>r$L/2w.١_f'LheQFR2I9Qkh%8A`3w*yTQn (\I"O<"MQHGTAi *KhdL g=.1@ ty8o\^9$ biO^#ͺ^M`fo_%'&c&{mHfvWj+?A&NPR8s-R RI;2 &'afZ%zRLWgGʚ.Hzc&)n@2jYsRMew!lq<>c:֫jj**,y 8 qiBNCCX-C1c֌HTȀlpagԗaMwǶ+#"Gĕ3gUŹ&*&j˭3N.B̌D]@b́18YEqJr_j\JJ3#8L"n".A(+DY">Vm8D:.\vYMJEUE=.h=rO;%⡮[\>;P, l/i:ȍ6@9< 7!_(R:6KV5N)3*/bzR!')`Q&'>ی};vN؝8#D-g^,XJń<[.{%K ]!nBBG.d]~m2=p*?c7pmS(u22d֡3P3Ip:E<6{ oojw&oD| LS1~L'G Xjz>IQ8?quO< fQ^t#3LJ/@ӳnͼ5+;F`|7'_AwƂ>(DSݧ 3]$_Mw3't&-y6?> f7?_,^`fwq;`LYuAYn+ހqpyz>bX^Ÿ8kG%E\L.DB7rM ADN&sܳ6FmcU \uosA}_ %&Sr"RdW*& y=|F2D"I?LA9+TnUؔ1WNxZ)4)FAz\ EDlAl+b%p;%ja:hfv߭j6Eۗ6V:jmEUD#@KxJ 2d$ :s:aO>BO<_drf]dUT^:.j!sSL'8BrR>Z iT:J!F^i=,H+}zZ,MG PкA=նqa _E獾]hڒ{xtg,R92Y, L!KIq3'RZ0~g9kbgI2ñV+itgǵp#]؜w7?Mީ.4_ o|zݩٸ]smIp6^U~V0oC^\2AV{aūQLyw,qLL,#@!pwS ޠ#e5W/[,y,Foq(cF jJ D5,9PVSSRxyb־vy_s7<'_Ң1Y8o\HT)Ry@CR>r*W$M]ɏRM6mv$7~]^XˇNפ:c(_鏟S>/ln\U޿9tr:n h HcU#Gza,v KKCPxl/3;Y?,{,Kږd vV]EVC:٩hj WZγ`|Thбs 1>z몷!́\Xۡ)^I+?w|R1rqпlnc]mZ%Φ_X~Ϸag-id[ )XXgm1rXu1 ` Qz䵫cX}B%|Ep-A FU7qV[8e]&/ZEywL1\W "K6"B#eb0étƮZ Y yUqqG#ΐr1Tspe%+gW!׵̃S'kĽeq%D7xx+%v߳2Ux[h-佢MtYh@h[|FĐTƻ?oE֮&WC^Z*{m.G;•C ovvuTڦ0Wx\=q"Sbjv}ɻ_ԮD瘇}+.Ƒo'6wS97݄svӨ<nwǎ, YcvˣYt*]z̊=Ŭ>N<*vAGc d_)J哭k"k+[֢CAo5VBw&Ѝ.8>^)o ^k"(V_&0НQo]d}9ei&QdMYW/yᣭyZ8g7{`r;^˻o֢C 0((1ѣ8Sb{ȭh0 hT&e,w {rH! U։X2I<1Fɓ"%T+kSzjdA(r mǖ`B䕈Lur)"K*x۳&Κv6?vhY=0iVs6jgr|B`k NR@rd)TD  }4LOR[ 1aUVgD)*E9&.*nTY7#$=@ޠV<!Nnvڤ*kb"ɁUMPh" >[(08zq;UٛN<mW#uiAS(V!PT/Il]dF]|)Z8&8MclܲavDD5]1TG=ߌr^2LXpQhT&#x SZϙ`L c-ph`v:zӐZl7A.š41_2yځ$d $Z,!aH6R!gMʜ[qD|{Y!K(j]o:;rnNplѥreri‚ O+~2\lƞt׮VNFr][?eaf4i䱟X6ɇ'Kvҳ۰o}b1p|+'fW>M =sgη )5Xja]JH%HydZCk_'>*2 K:=mʧ&ӌ}`]9pU8CS>H` éLJūqkLZpO+ l&0\]%CeE-D%qc;/7ęw :f LȁF*7P%7k'K χ .W^Y8ƾapkhr̾`6QiѤJ`yQ7ko.~3m}'?t-?؈ޞ,8=.o;29MFcj!tv^"Bgbpbo0#CX 9%Yrp %Q3"6]vPmfJ3ٚpðpƽqf5gD럓o,X"f;8+t5V!2RdV+dPƤhcT:`hMP+CW.oȄn&e_|Lr(hM1 β6ߊJK|y2 z7k4wηHMx}N`0pKR[dtv!32ѪZ]]ljq9:㝢4}qpkl(;;l){of=/Oy7%{bDʪmZ8ae"i6-c> K8 %v!1 Ź<ILnR@ZH>K_TRdDL>d -^Uͥ0+ gT 茈G%j.dmNҗKTk cbAPAR_ 1;V>b0`LTQX.[1YuhcT@2k`#1ph+q6L^,k,]ۗ8k6쒥kP9BV:DYA죢NG !_fvX 5m * ]Pge3e08&5$DP!KnNܔ&k ѻ[R>ڮƊ]|9IjK6b"r#kgMB,8jLpkpA{퐍 [{mDVs:,C[m;͠a;p;ہzBmhr3{Nm;c'w;Ps24:l֛4ާ\C׺n^oTMYb; -!-Y$v 7%M\})ylzGқ5< ~U޸&7DZpUR^b ]޸&. 
H JﺻjR=W<+߲i'l vo0/]њSOo)GWXmia-]=LJر`hpWz뽫 nr7(?}jaZaGW30RCeGRoft~z94hL?:-Q#cBU2]Mƃ#7&7e/nZU L&;RMB7m1&{D\&yT٤8W讬xmNǧ۴qz4Klt/Zɱ-w׽sNB駓Y~qF3ļOW[35C bs \׹o Q4ͶY_&F/y6˘ݻu`P UHYYۖHYk('Z-왴4d{<$l>g,'\zg_Go~ywG2|.x$7* E=i?~*sVKpX|'?wⷦ܏?>GV:w1$/ӟsc]̲Uc8k pu4R)Y:iw:6z"_TTKdmxw @6xM`4{𚸬Ii^#dl}Zs㷳=:炏]dqʱfQ-ю#="=yA*C*jOg3W(6*P0 ApISD.)!2ҪF"BBƐkGDA~Rȥ/qVÏP]֗*/=_Pߠ}<>r g[Dz2ߢDgJ,|{ &$hL4*T2XnoM:< -XT:dJp61EkHjt6s"F^,l#F(r  27\1NW.Xd)Vo{YvqA+ˣkw۞=*w_B!A%V)dSrZ"/0h4j=@&Q᧷4UY`Zk evU%,IP7.8iGؤ*kb"ɁUMPh"Ֆћ}P3` 6q8.نnc)]|;Ϣ7H}]>+wx6YB+"^YMZd\@Rg&ؖl]dF]|)Z8&8McD_0 ߎvu^։F&bW7yQ7#]9Os/k\rf  opb6\kLfDXQY0Xl:P[f+{2-vH/i5NM}K]wmH_eq#@pn3L_v`b,y$9+vKd[حN9nE6Y{μmCEJ*Hb-g]{8vHHxD]D[TRt|=h$yh@QXECSUrN|B݁BApף>/$A jt5 κ* +:( Ց ȽA*bp7K(TNJ\-`*Q9x\J%櫨c)ELJDT_Uէ |jgH/g4j0I hkc0Rsу=3*Kp3n$i j ڭgw+!{yVאYW+OͦmpwPHP0aޠK9jioY+IyE焫YE)Be8 ^C:qMBHhKsDR @F(- m.ܖ>!UNxZ)F2FZIIlZlERk0)?=hX펶ysn<Çm?t[h˧M8D JКYcv) :Ψw<mV6 }%ƚ G38{ ]Qj?o;Hf)y[jƘ&Q X)+%\׼*KehET_:HFNDc'r/%r'*Oq 9ZU1U\o23`UJYE򺉮M.]i܉W8=E;!E*/Rr:kuVYPkHޢI$ìI9g)kgB%Nn~IJ8KJ+I> K#1e@twx)xD˺ٮL hOK `گ8.~˴#A`9K&l #A(!"KMeb m-\RYzkJF瓊h`Cy:cU۩"gG=CrJ;|;ô`,Y`bOhd# V 3($gy$ؤΗV(],tBHXID.&rH[":atE$ꄳ9 质|F54:=Zd5RHqmHJDT'B +Q^Nh17c}։סbؠ DZ^h&1G;D+N642e}fEHUqPvN_U`BZ|GǷ8.Q"kee&~ /,3a!#!cxY޺%3+ B-$0&{ϥ FͬF/F(W!%07{@haӖgZ'h)49Өr Yr߶ m;M)D}8_ZVSDE|!o* zc*$gIIc$>R6g }ͻ@֠V˜Ja掫w(cF2Xظvf7NvRʄ,Lq3 9lB8gDkkF/6V[Oϩ#ѫɐ4) @By Fm)9BJ):06R) gk%e^-%u)d W"Ie )CL'c'ޱfzQQŐ.VuH{i_͟.lWR?_|krVBb^e^y>!ЛLU.+;5x/h$S E5);b⒉kv)Lga28=L^W{0 VUN$NƀE61[LKpKO2;epZ ZKw~P3\J%;#D^OSC{ih=1vCG:>p|q؄V4{<^Tke`1eq~c3풒7 J;ؚk{Ru#v#]byOp8`rDO_ilsAѱ,wwcHXhJ7rs~{H?4*Ш5?Mcasi9oC|?w w^8b*u$hl~yK x_{ִ57Z:Cz6;UC^#J~Zޏ77n^ݩIq®ЃWlWM@^K&j*JWP'~-ɂ*u:%e7Gϩ5_@,$Œ9[TX" a)DgLC qA? t/g. }OAl;azqx6}b SZRl0( $n乷ң.Z(>ܒp5TO\(uZ1LpzsֻK EQͰj|`m|ĐvA/\WqUt%ɹ{iTa@B\ c^LVNb*ᜬT`!e&|"qwЕ5*k8:璠+cY)4u%SYY,UpDx !IX:b>+WT6@6 JCL̙Z0#sP5rC_>% sO)t{qJ: zZ5eNDf;tB|\^'%?yFYG)XǹܦRCRl82&#-afZJQ1[* mLWgGTe PE7dBh-se5roQ0Ԅbq|r)xtNѱ bqJ+%^Uݗ)B;x0& YiPys"2]TW2.@ցk5f#sԶqX3}\*XÍ %UĤ挭Cn(8T^[慮{+H7E%x8IK(Nn'i:* σu؉IY 0"8tSNZ9gУ`-cYd.E QHQئ4r4 `3{,G dafɺm"g;ǎ!, µ[[Oڂ˵gu\CkV|p mJ{FeZF<! 1p\1d4v-YRt+tHRt"b2%Qd8b ft![#g;?8kڰ*T[戼G:\+I=RHQ9WE@hd΃__<}g[Pp(v2ū)ɛ%`qw!iۃ;$ 'MN)3(ҧU JL rT0V1I)(X|Mܱ'cO"rG0= ,[μY<۔ZSD]كd,!&p]w;Aġ}J1<Ϩ]d}ͤtTLnvC.f|tHdZ|̋29Tpp/Fc(o?bg._xUy*y]y&o2ME.W|4}sqU3ߟ{yl3\*HS5eIҋ>3WY7[#W\3E PNwH/b{l]|m;tI^L˟1't﮴&y5oqq\ք_\/j~׈x8ik7Mu^9{ YVcvt3^z=},3p|.> Я5HiIQ9qRT7 $WIdΜ{ߨllBz\ HP79K,.d7YXSGZ3(i,san]U|z(TdW cZ"zsi4Dov:FͿIh@c heT* ugI"Px"jRzn-mnfcq"<}=azxΓTJF HE1Y2hd!CQ:P {'UG5;p t!7K/)MA U]}bPP:h\#%T^VS+kH4SԪz۠8{~yD!߭gN0nm;TkPg'_r#'+2G'Uc3%LHFbS BI*ҙ4ȆG;6gx:t=6c}:f[ q^h|qFRyPGii 4<SU?.6qžt]aWl$o 8)jXY%d郧zhjӭfסń?Z v u/P:; u!d#‰W}╻ZXN a> vK*e":"Jj0x+7~c9]JC {N룠by\(^W;ۯxW5UU rL~oG%XQY?ǕYM5(c̣&gD'.8EϏo̷Tq\K6S?zdS9쥗57ccJyR"y_53ZV2^r ?`8.ϩ{ <Ќm0kᮞdحJsM|Y`;fqOZ8U-˳Ӌ3Pc=!m;7;[z{ҍ/N? 
sl"[VuWq=V\ fjl9w|&pŽu:V[h.]#F}hw !5$I@Brl) & 9Meʧhr} Rftz)3}簏`6dmQ-f֊[jŲ񒗔D%#)'ĆIwU dXCj=jȱ+mqC3u#P_vfb6֢n(w@.󳡀*g% 2 5l@i h{wZ(B|RۂG?Y)d@$-mK)6 5n6%K֥ska0ZH)Q4^ d82 P͋L(-eLS]&HbCcL=|o8C]zDL]x dN ,`7 }P!IxڭH9wM:JZd(1@y;E-X0\,g-@Oΰ CQY Ej>'LII?>'bFSGiyo<$-U4k2quP[KWKmMԝSiT/~|2HŮc9 ZKl@J㇓ޞ4Td@CrQ!b"2vL:0դb}#} 8g^3 qI@J0 Bv(&ы#^$܋H8#^#X'aLD 9%Ce'oir) Ƣ9',uHB KH-yWd=)J ɛ`]P1>l&ΞEk .WYàͅfYnvwb>W<#>+6]]/V}ӳ!IzC;E[PΩ݄k4࿱Tw70֭`:9_nޯzҸe策wCosw7{잫w#}iFSG@T,V.3Ss5|Nt %`P{{#/9:-1ɶ#BZk͑J=.Y^ c%ZHVݺ 8$!uR:`({D !ĈYЁs1lLeV8"`=kW΢ OhbiBE@(v*%IrFaA XX#ZJ+qva8N-{H;/ ba -v_^y-+> )P*,Rs6srJ[GAk-4; !9/K9Zgz!!Y-EF Jr1X- 5qPn{mF3ҳloi(_6.t nN>h,a# j(P*W%rGʻB,ʕg"IGtXR>خV1,?>Š6:#RD $U*2PIqLE)2K0RF{9V^־*[Shޱԗ_h9"OE]p^/:[w 3A!H!PP(Cpwq}̎+>8>78>8J$( 1 LcouH@Q%$$`/6] ІaG'F)ŘPOv-P,Jx#b,*N]o#7W۶~l@;$X`d=e3?òCm)bR]+Umij䠸ȑG:zyRCTPbbE*C02W\p:] Xf|CU8^+c_T6XΧlp `}M3:^%1}x:cj 0cOa |z~6{U:)doh|DDgg_gsbJ#iiA["\y,=:gE,h3&%%g j%C[:M]==ݣ^ eMN37©ē<(8גT*RYJFd$e@Ell"74JΎʚA֡4H,y9 EZRZ͜G(\tdDKvݪ]'d%Rٱze^lVyB v`L(ɡ:De<EлY^7u8rѹs%=$죀rV"nd!B&yĎj\MWBcbjŒH>]{զ]ghh44d #vb1)+FUvI7Qc0zyB2NfE{1 ) lJSF 6SlǜByTHfW˜;Nưb jW Q[-X=WA%)EɏDy&Bc2x)|)QDᄏu664W!ciD2%QaA$*R9h5s./.eyl<Z)"ʈ{D>3E\RsMЖ[ke8 13ru]Q3Wt&8N*Q+}QN`g"$n"%.A(+DYb Q͜G[ E]%⢪>:\+E=Ų^Q9  ,8j{\|\ܙZޱ)xa3@rÚ̫vg3Qv~|XQtyH_)eȬ(h|YITaoQƋR'BY|KJvEUnޔ }m4q oW$Tz|BYՎ\9%) 1G`qsQFEe_`{]]p:99\s[n1\I:yNY DƘ Yg=#@JI,-8ZPo, $a k̉V%*"-{1@@#u'?S=u<5/8ۛދ=XQJՙep)xf!gMH6pV䥬HxFftS;iZ+\I8' r&2D+"o4hKYRJqu?M&X gk%Ǽ4ZJj SDD%8&RJ$SJ)\VbHeVj{h=N,盺Jev(%jNиhWY4pT]K3G|?@wdsZXRf@[dj:21ňiq9;8FJ.Ah[ήveh0vޒ-/(ִ _ےwtafV= Z'*d&V8?dʃ٢R0h Bt<ȑhOrm/>۰>ÎGK[Z]vdYEipS%ϽtB)-'^#QąNE绒U'6亹ع}&ܶYcT@M^/NAO`F2/V^X݋5e^W=bߟZd*rpU? \Gzw(Ikp4sx]zB\=Jv pZ +͇^sWnIG+o.ef7LP:`H#&go:LgwxCC#.:EZ~yn+VJB]NYF6!N>/65qs@1.<`2ƵRaNƓv:LRg"=a3zeCYq\ژa.^uR\W>A{:FJi|}ȐR6nj {u7imʰQI5ӋQު_O;Os|?etT*'}sdhu)&  IΉ V7)7y:* ron."Zt[`= S2I%++z GVo@+ 詣=#"{W@JpU] vp5•l]&]FSY>poosk =לr۸1N,oXf0N XM`4ҙs߻3r-=ۣqEGOar4bnvˎ.!9zn<@ـ6ӑ4/Nb:`|Jc^Ũ5 Db)z,Ġ4WZl_g+s\E%7Dw:4efo:syԫ*@ Pԫ*@BoLUz{^WUz^WUz^WUz^WUz^WUz^WZ䞨%Ŕzӌ.r/Z_a0z|t3dϧhۄ8=h?HdM Q4,Xj{+>?ߍ_%=3 iS7ilL);qmmICN+ouŷKsD"mƲSi0)%貇T9E`Vh1鯫QxF!IX:lϊQJ' 3x9dcj<F g0bmxӒgO.u}ʑN}~ϱZe1`#WԸ!ԭN}:Plf*WݽIIYW; R1FE`i%>ȲslY֙m4dޭm4^g٨-V2"@ޣr>IFGD 6sЃ4*9XE<'Л̣s&` \ V3a*8]bz3Jҟ[k+_˅I:-υx{K39VmϾ +~$0qAbzJ|UJmuzv9Ç?M{ &slkJBơ!FyGM}.zL6{fث^79+ ".qkU&rY5,ڌInNIY+A,sqOFEeb\)"3O$Zf!e:^d{!{"ɣ.ԞmDI37©H\KnS֘)LH^3>ΎʚA֡4H,y9 EZRZ͜*\tDKqjjW2ʪFe7YeYJ)2פ,޺}ONu|SΏ=pOڅk};˒LUT~8dq<x4 ireCPƓq\L uub Ȋq䂠S}\*XÍ 4$UR9#c=R kyƦX+cYpiQ0ûM,#=5Uvş} h/؉Ĥ:T)'JBJCG%œ3Ju\V:K} &cRؔ&Q`)cg<*O$ 3KΞeÈ'cX 1wydž-X]ԞGڔ "S F<!S 1p\1d4ǺKYRt+4HQt"ckŒ(\2D<0"QYKd:*a5s./[/eyl<D""Ƒ#2$R$ArAlKÁ,`YHzgzF#Y=3QZ+wS"G8jč `{F:$sN|$Z~1"]sH*5Aml@K_:9PꙘ"!d%(p:8ɐ%R\fl׈_>f~Ց|͸d_7zh4 4/V _g|ܩ?x-a8E?hG?"EV:3nsPG-ӥtNx4:GsD8Bp'(N eQ"F,3bvc)>yRc~ rOgS 3ξTJ[k^t6( &M̺G XI(tVb jT:V@¨yh W?]4Ɂ5o$;XW,o8)j%dُ&qE[#n}vjP<0XhE=K+ Ԑe ^$m02dCbAWNڤGDUrձiG$-.n0(6NVyJ<<@8O1ץADvdnh_imay3Y(~[dY&]qR cZ`ֻ6G%ؠm-fs51^j!Ķ[Y4$ &[AR>YI'*h7QSCfB:.h6d6,^߰]A[7G_ pzK,Hoӧ7ML5m^I>9Z4vEeb]#b3$.pfU+1|98]=;|ςryha}92 wzU=}j=ZQ?|1O:x >Os1F/ yz}zwO_.D%j0$E$t!yRQmJuv{ v[AZ)EMx7kO%n22A |&iYL.mFlR e0 o fn` $yV?2psj?oG@VͲg dY*%o tDOZlSV; 19/K=Zõ"gdžBcKV%B2rIdQFF|6 hfZZ͍`oơ|[!k^r,a 5#I,t9U 1ԕ\$Bf{՜1 wc1mjc@RŸ@k!*)2T &y F*>ho#i!dn#=IzmUwg<;ad!gJ{8_ilin%x=`#3PE[HJnu)ؒHKuܥni5ڄ'9ɕId,{%ߩ16f]UU;`Cm^]Zר]RPwl9˶eZŴ Z;[X-+aW6t*e) i7t{)b* ݶUWYZ-FpƟ)\vY\WYZFZfq>!hg ,3ĕy~*Xƃf\ΊK*P;hIyAā`hO/>5̮%5u hprͥ!Ft],-zR.' 
0b`ebX% ,.}.䤅WT0}e:[g8Q͛HgG*=v?Ky;óAԉyYrt9A,JJ}nbb .R<6|:}:zŧ╬߇uj>)/8^aMXZ'(W ?T`E}ƫʵ~q?aOاO姉 ahb}aI6ǿ--іhKK%miva[Z--іhKKmiD[Zkmi,x4\xwmiD[Z--іhKK%ZLK2h3<޺0kXRi %W JTPF`¬jìj¬A{3ڵ2'xfCxJ1 LJ.ZE[NcBk.47 ?u ꖁW@xJs.`^+-}j[qR2AP  .$&JF`ɣTq-Op_쐏-N(3@qI։ "K\i?޽i%jۛ74C7ӽ{={wh.sT״{u0v1Wn5Xx N aèU'.ޚhw3p=E_b4/-*~E*-.PA YTF:[iwV";Fsڤ"yŢ!dRou_<m#^Cl{roj'PEmzNdL }L2&$ M LL;r(4NH=P.ڠN#AI6Tߵ=u44+rG7jJ^o.$m_tz2ʫ_R;4gMJc{qDIdIf%ĕ:ZNziS7I_𠄫'/q6}f!u%$ރ#"HoBչ&Qu ڌ zuQ D4<#"@Pkj.Spfy[_|M'.{l u]%0egF]UԿJjd{t˺H<>7^ђ@(&Ҹ JtVثnJx-U^$c׀,L0cJ{!`8FM8TGǙ"UI5^Q Z$v 80uh"!$,YìN*Ge1qp#doDM%_&''awH&4&&0I9wBQ O"&;}$X"+Nȉ24㊤Q$ +DKQNFZN)}1qnӸ:#-u*7:ߖA)d4vT59 !Z{ٱ[dioO̅RޓEB HL6a]\.b>! bV)xouH (BO B2N[,NInTؘ8w(vWi Ҍbi u rߦw>Wۓ'?M?z/;c=A QG4LqOM1[S166ڤB sYYʹسe4&K$vDD ɊxΞiJ anL;ڔ4ڳ25o{ ej@z,YT"jCMhÚe A'X3_ԸfCIHe8Z17VGakB$hKFDi$*@< ac܎Q9<6n "6EDhEo\pI hr ( "3.Tu)EA 8G8 M+YP3.8y #u+f eqgs;"~98qqR&6:Ӓ"oi-.㴌 81I\޼Zh& *Hp $9JhM`ŭw6C0>0 , b]|2y[|['iU~~ ID'glo pQdY᧷hTǗ긅 u(#Ց%L,8w1@RD.*.% !B>tf5ۈ BoӒ Tq~}ͤLyjft{ V٠Z_,Y%-|. wcx\u2U"{'t8W:SIϿ*L(fXRiB g7@uo=Nԃiz4~mvl%K\W$CI#Sۚ62dzhA,7AIp_~䨒7Y4ohcKRŻ5XJ˥>oGw7 gv{ыrȝ4HTh["O9rhYgjC &w96M 1? t!\%}*-HSTL$ۈ1\S&(Ũɨsg4Y$6zcܡ n@o5kq\{p6A&oywϸ6ynlMV6d,P+*| {(.h/<ԏ3P ȕwqYO33OCK>BO<H%QDAHq 0I RۏǺ 8ՊsEF'u{Y?-'tpt9v 5Io4P% 9PQ?{q_엋/!Hgd ;ߢZRLR^W=3$GC)Q,+zWWU6^(FZAyٸ~X~7-6lW#ᐌ]@ev700@fr͠Sr.p1@z{8XgRQ VR,x)TErV#%DE.j'.J"{1?H(!L;cAS` hZw2ӕ뫙qj-'z|9&LѸx%`9gt/wE\.rW!8 ;+㟋wmJZ<*xWS;4MQYÔ? iwv-yÝp%]d\[:;6 CeHk]:r2D-mZ&,ϸӤΦ#_ux7̋v&s̗p o?|Bd\n^;s[ J& 7 ZpzkrUuVxqhٹVvӔCO;;Y!P,w*008X WbOF@2n3~72s1`Z/D$J 4prܘ0xdZM`(щ^ez].avw{./ơ3,]xw\qR렇}'D9Cy6H)~ބ񨌌ďjhK̂)SZ@BI*c7<ƕ«E<2Ex2 {)c2CHAh_ywʻҶ2m e۝M;toKs7/v E/{sϔ {`OQ_ٯ=',?V-;u:}˓DVA\n6D{7uZn (ʆe1ggM+q01MG״eKֽ5=* gN(O*+m?[_9@ըw`(K4/fGtqn-gd`~))6zAovN+~ևi>FO~&2ř868q-!y1`_9\82P&#$3HZ鄒1z.V@hiB$1yYP}: egs&9OP A Kkjn¦31H:~QkSC/ObN}L !0X-NMYJg)/0hRzƹg7|v3 O¡@|JuVXIP!rLV>3(* %B# b!hDAETKhϲӑ*Nсu/-z</uJ!a%)B xq#x|!Q)fЖ`ъW,>ou9cQDD{t0A2e+#*u8TeV6S͆I =A갠{026ϘhxĒ Vq ,pNPzEpU`68^~z|G-.j)#ج5g<:k“ksMn9$%)jH)ӹX~N)&H:Hb*ܻ஦1=.Ŏ̰P!x2klv߄C庨"z?~b෻ Ec¨? 
]o&Wr؄7?>ƟG"P u7:~c IS?3f\ JՈaK=sj*M'24H̤1ܜ܍vxw=[Ƣc\`SX2ti`bqb#VT/æHX׺ʺ`g ZGK!`S:P=x:FY^1Tk%iu^ŵ9Y'=B~H6l[{SiL=f;[˵&b3Xx!rF=/AOCKK)FDoAs[x-"Y ]l:EK1QgV\I 3N&Ve{ Ij3wYe=oDpJ>tI`ύWkgc(Qy( kc1g,Xo/$YM`l(iy^aZNg jo{MȲQfRen(o R=|YToǥk}wH'LLgL,ŮLAFݧΪ97zywW 6A/oMm:QM/ b m.!%֟Fv;&]yjnVF vv>y9xIJ ì̭!ܸyA5wyuMXP3WNZN Ҝ/>kLj &Mrcgho}c\YK͖<ѡ >+"X4Z}b' -' 5T~' ɟ]RG4rғH܎Wcß;ZZG'v^p-C0VA&[ HJNVn=pAoY\A7_7*zS.xN6CG;6_M^lx(s퍶Z7U~QO~<㞿`ӢN&i<ŷǻZVcW{x- Ӌ2,@M{W'xQOMTy:"B?#c~I2\Ks/ J!{ `c@l0LxB{15#oǿ.S$F6*8 ~.ƣ"+kB:7X蟋)dRNԳfM?~"BE +nKðA9\z4޷5W3 %2TY3Nqk&J%HV`n4LiU.(k9$ռ\]o@*dc_tU'MCv:oUU )^(~F :ճQ(-Q箠T *hEF\%p~.JZڙ%z T?al{6zmO]ڷ\)Dy[ "A֨RAki ڞOI:Hfb Pv<ʚP L,LRc$Nj[1nÿ**[>:'viuU'o>:ܩֺ•>Uю1N!oǛ^0B>@(o) J:yJ-RhG] Q;JEW9ӿ('4iqzT%m8l7x&agH{~] ʄJwQh o0Wu0㪸+.~hg"*cN0$mWUnb0kd\Y;%@*d ́TZteTdwXp0Dq ‘&'MZ$U6/K!Yq'uUm:*6ˏ7;$o1-?'\$HC qNQ t؅m"#>MI83oͅX1+^?=ߏ\w%+O`*jx?SK#:֊ͳ1b;twz8ݮ,DÔ~6)>dk)h+9 @:guXʮ Nl 1c=/;LJJ)F p},S9#r;kZ~XzK\-b*t.`֊ER]zUw.B:;~> Nړ07.ShXLJk+4vۣ9t%`&V}UIN#+ 6^K|Jt5M]$DѤHJ4U *鞳0G]^ѪJ49ukw(tO9{P =UIB{BYB) CP jNFWJRշQp0m4&) + ѥgP94%&%bW\|!9 >|&HL'5}6b?1}=$ӯQL3AZApb(O㏦>\W+sz^(X#_F.u18#+?%/s d_6FO'THJsUHٔ(i54Uտ jW Rؓѹ)DwlXAUϲ%Xv *kS?*N?␷rﬣ h2&^E-LQE-eL/F]er͋#TRӫrEA$Aj?_0gWj5P>lU Os> w|3;I0ֻKcӺc|kcn s[% %\V!J@$#nN/edj5*}suR\nE-3'r:q!4@ ¯ťt VPQ>wH YE3S(i߆OfнuY[Cyx*w'Q&/ǭU5h%iNP7z@c!Z(^O .kUphEa|$FޛCn ۡy[ww 6MX(K?M,_?{J9/8%AӢ-bgB{\._l#;%@oojqǢ''>oD%`@S4B80FcS#Ӳoڷ?۟iSt$оjZFAIKPY9E2Z 7uy m)=0b6Bă2LXGDKOOe ޣԷﯡ̟.'WMCe׋pv WN>Tz^V+ħl8;Ts(+*h#^sjD"+DN%ꍳ"h`oHN;kd4߀FVG#$K3J1&*yb Kp1r6KJ 8矤&^|75\mS̻qv}s599y\sXT**Wm~`VmMN?AeϢOfsSw_ /-rm/**\݆6D' B4T&Z"tQU_Wf7HeeA ~f/ӎ/-2E2/HN~a훹|az# My'saTTCKHPD3 ʈQ1xΒj=+,,Kuq\T {œ)tqkof$-bSD-od]vlۯ<߇)J*e@P/p3,`(UHW$"`4uFD5pQ(a$EѪs.:* L0&dJ Z\T\XύHN%eL*$gSG'LIEg lQCWw4+ȐcaƦk"zl]ࣇ1/nX&ؼ,{#v^JjcO[T͢>mͤ.-mܞ}mmzlV0nN3G&j!~ {:UOy^&JLXv5R\ `<%Gba2S^ F|~ޫU57t FFP2B,@!K#S8cy d\q'襾Է*BZXB m"Qe7t āyAd!TBgn=Tӱ?Wثǫe== ͻ#eQOZ(t6}kYu:>xr'ӓdl<^׈9Q;vR(c &1]>_uG2q&_䀯Ǔ 'Ww-E-Eٵ{Ǘvr/t.p|u~z' izaZU6 E]9"w ]lJ>n;5Gϣp5f\=vP,bHĈyGc/,}S@%Etx:} fv5[X*o-y靷/xDZ{+i}Avut5yڑ3sYwvuj+6\ @̎üHt/C$5cp6=0Myk6f0$4[2^CQcʵS7ΎF ss.`mLP3$ѰrQ5ԌP ᘰ63f>Sd~_C SzGNY7EPD ]=o{0J: C.́,,hS%IB֠3"qE0"n v* (ç/G٧36H6RGҪ= 7Dᵆ@gF0VۀJi5=߭ =U\7֮vЃ+}cxsb2 q\}M=Ӿ03FK]@Px"Q1#S.  {+" Niw. 
oF\ӈ$ZτY1D47 \8%&F˹B88Jko^yDIr(4X*|-vmk+sT&jp# ZHP& 80h\I`"堃r?ţx,2*tۋ01RCaG!'5'>yMЃ%';ONP 8= Uj]Qm`+'A?DLt2iJRBL" D,rEP /)'R]6"Rg-ךROmHni.*b-Θ yq2=] c QA =#/jLM[P&W%@ JuUbUX˃V^k0I$.$K1|ףjs^MCn}1gM+S,d 5{lW sZZQ?rP+tُ?4zGWT!o&z{߿uut>ux2?u};Z6*Z'|[ҥD׋n%CRڀZ8*!sA*[G[#ǗQFU 2aX~,^eWf3"1hD敹 $}C;|hd\'n V&P@ShӪ7~l8v0 I"0ԔL6?'g;/)]iٶ;{ar0z: k}Quuq}:UZ8qi8 "hai) <1Π"7P^X/T)`(A +,Ю=& /m.xh`2EKį${m+*l/wCrf8_IA%zD43S2 J8g+-TZVbW6ޓ.=p9Fק{Yfi/o4eiכHoΰB\sY^zG#P'B-]E[#g:kim{ rG-}厰}'eUyF&0:HQ1JQH8b+tL*ƩEJz"+i/=}I`o?J*5ƠD@ ^gd6b4\v~!seؙANl[+/YХ^Ӿ0K9|?px0 9y?(4%h KR$Y!^ga}ZZw43хIJ&2_ 7n٭ym]]vO.| 7a.Ƶ_j/5O?k|mUYύf|U렡ggI|ՏU ȩ2^d߭sڍJėOq:/UVҴwAK2BDžwvs]Ki*82"-#T:W[߇lP+1px/obDv ĉzT,(*oL3XKSyOPcBE:B.<|r#Ā|QmD8*\`Q֊tV9oU9zkA9u}o1 ~V,˥73|?˯+EQL$ј#2ǀ0N0Db,W(Lqk^0ԫBȍN%r$(i2rX&Iߚ?L,y"*`3ϥ6@4KAދy3n/[Y笳xF5{S2Z&pۣ}1h$ eĠ=atH!M2%pf3cL)9TOH]Zec'J1h4< A{3/1lR]oQ/&b(:91}_5MZGbwxjҤBt12`J7կ]JO̞U-$KUUl0 $@(5)B,)0$i1|v-qO&8 w'SbǻiYՐ$ʑ)R(UD0tZ4ox8wX0(|}1ai>6V: ih>qDGIa.FҘfwWH6_ږM`v0ZOQs+.xٴ1-&wF$f+{*y#nxU7|e7\,#k"ZLqßh,]|<'zrx0]38VO#J :f/:)>4ұ"4WMR5Ua{lUI?KXRM8}`6WqprD?|Q_??珿<|!_>;iUewJ_U|>׺OwmtW]S󮅕t>{Գ ߠ_'~(rm٧#a0iv9Oq8]욤U+?B`8Igh5*cB,[1Y-4LB86_e~QA8׽:=ғq4e%Կ@z%M^&`X]-bBbu@\6sWy(lʷ.67# ^0%LJLb 2p5<+LNQF'e 7t*$h,sTL>ǤLg@t$DNmKaZE-״[!{rcr?Fs+K>gY-˘ĬR&3&j:'Wֽ`/Ld*[*+V~9Ad "1 k%ӤcƮw턚ޝ6VgKi#d#=9ߖN!h,<{8"e \HNNoa4WnySǓ9jڿL\̒x see^k2f:RO)vrQDTgb51`XOf0'9Koj'>3v9:U]a]yڞ>(/\@T6=+ Meۃ'8NVO~{C񍱏̱#YrdVZadBhM\PhS)|r BDz"X֢y B)Z-p&ne.]!uLWαxK*pΰ\C\{k 6&fH3uLC&<"3.E-;Y5w[rVb4T:$-:0k0钉ۨIpL4B$Гc~8F/gO=6l G ?Yk\IdL$0ctT%y,5 $6r82"7#I7NJ J]F\LL:l"$x.bׇ:v(R⋓zF\d Ke||k-Z+`,i=B}HAZRFD"," A[(Cz|qk]a]1Xآϊ47{Fya 񎷜On(8 '~dZ^kqoM .ՙVF椃|1J/-/uA[~*X23%hcĄI)$x$(P4C.Cb@r[2`)Y GXDS4Ԋ(>X 8A¿DŽtNb7 H2$򬌏R @ "1㈙s/)Iilh4z{ h>aE›A9́ X(Juj傃ŧψ6$WhP%1ޝdFZ;ތ+Ee|19 bk0V\g,A)gX8Gy3mf4z"tM(/]u{]7[ h%z.NѧVt[~EYǗ8/-djV4}7EmWjnۯơqum ;{;7>f̿H!C@b 4Ew$wZ tDeAmhU}svfxz57&zWqN e K>g#?}7T(.X9Ra9Ѹ/JO߭N7'a/+'ZRSFF/|L cRZWJDo2K]FDZ(c `%J{V!Ą)4hSj1S + *u&()W`jv4 g]7ъ0Mz֣}%ǪP-? $- Z2-vKޥ͂rOdzB?7]yoH*Dٷa x$II6gFֲ%O<|WMRmIeʖdج[7bMl8 ri>$ >e"qP$=Yvx5$vzty/IUn:`&p?ry;憩v?*%Y >*)ei~%ə9/V}Bi+…d< [X ͺUCyuj&y6-ϧܛDaHSi{fK/Jk#:?|*SbL?nVe̦d/^}|f*vO&Ө E1Kd`2 jV=^e j-f j-f j-f{Լla5[XVla5[XVlv j-f[XVla5[j-f j-gscwFmN)Hym}(2qp4z;|x= #z!Uxd!R&ѭ)#"b1h#1;\49nDv1Y5/-b<5G:h֨{L,c05%a4UTsc%b4:zp.V;||\ȴ ׆x"2FKp`drey>k){:rي]%@r7xVF̧\b 9w (X;KI,Kz?8[(܇]o'JfӁ(&A1TylFq{ϵs̤]s`49HszZG*#R"%D2'Tf@ 1:`@[AjVQp/TpJa"qR`Fe2J<#Rc]:Gt|Sqk`5SwWqᯇ/agoJP;LwfO!bI\e:aa_eKgV`RHI(EKv͑L"H) bLQsۻ<(ƠBOJ`pUtNP&$Kd2-!3r%[qqЯ2` es.)H٢l/Z3>v/X +2Ѽ3nV0T 1{Q։t8`t<7Jxx&dWީnUS~-Cu;o/'«El 19[~uONz> A?+`BChH;[:G:_5 Y9 ݗ,G >~LVAw9b>f;^rF֏:}U.}WpБjE\SIs`#!,>٠}w7:% TOVuW3;W߾I?^ݯo_9:Dۗf`\B"{^,H 1/>l>M Mm0Zv[5+ƽ)>9tfnD˾-MTr'm8W9F~kr槏 _\EI7R|N;UY %m3=>(7"UL,vyGWm㝂Cݏ$YK2Jj=8K*! qiɬp/F m S>F,PMT^ER(i%ՁSeO0XsySaxȦNƓFi':ruQ9|g5 Um/)BEMNw8NwEnz0/YOb<"Dޫ}i'=]<NӨ@ x,/UЊ΁X$r "LXjhZ*S DD0C-Ain5轥RxM@  S bikרb2u`X0iX{)) nȝb΂wDkzk1r6YoI=̊fp7mQ/ fJ-w}5Ep7XRӿȧ[\ !F-HXT{+ǩ2xR["D,yH3U<(h|[ci5y bDF_$e*LQrBcJCؤpDIƴ`iD$sݸ^ݥV-SGO<3Zhl5kj,`mIi B%V=#/iBf7?01tQw2va;ِԄO\7ȋ2ո`IE LXI5EPqYf(lV EB 8e yd `\1R'SFxB^kvɠS-QщƍCADQ!@pu!a(^HϔHɎƃBgoe{*yhpR\[w'7f-UE':ײjN kvgxvq#ߙ@NJU\[UUu5٪r4 4!$,z~v&݉s 1\;czS^zP"*'AM4nX6JMv%^ ),[sSz*ՠS)7Yx@p \/3^d [1&fH0 cWҵC(o n$W}MISlp&Auezl1/J~x~㕓9sneM--rk&)3Dא)''z},enRrSI,Lj2hq OI AIy2R QݢdM_q8+o

^uA=8_"]5ՁɅַ-}^m72pԽ׺IJum?fZ1AurΠLxN/*6Qem23kv.;¾&m8~=sS0vO:QLqOK f(s;ʈG`Ac@Aq1K-94ŞAFU%m(D@7X,BDPA*) QG12H,')ַ܍oI~[n5c;-eV`cJ& N$ (2FBčK 5p㈷w[Roy7Փ?3#0IkGI:@=r2o aX)!ƴ* hS+\[[u߁0@\Θ;w}$ߕr-*yJ7 C>3D0kt瓷;{IQiZ1<9$ NYk} 򪭁L T }&k"<ǤhNjTU51g\ VGdWP3%a`g~n0 t8# XLܳɋg[SM"LE{(9 gY(܏\Fp~?{}?jkmH@$f~T8kuHȋ߯zÇ$5HkX9񕤏+mCRyX'~xEB8Jk0ЖUs83:8qST<[h+nˀou|g}ZfP) Ǭ]GŔQ~T|M@5ON3 Ƶ= _*uQEQwg۟k^X$Q1u ! VD1BA4|-"E2RH%Ϧt™21T]|KмqlE²MzYV*6ƥt6ɵ(d=>XD _{ܙ!MD$4:-*gx +2S.dqubqCM[UO/.C5X8(&^*bU(\>xdcnq0q4ܱ%rɿ'IsxY}*fS*/N)' Zs@u844Ӡ).S;;Eo_"⟶ 0V?[|Nhg~mbÝmQcpKúHr1 JKC4@V(EbhX4ƀ%w[ !R F%S69k5Il&Ά׎j5LFWU*aДfَ2@Z]󩍛Q[疞oP皘bUF)3c4,yËIU;ҵׄS:^ P S{.:]oHn޼Ao{o< 7ևJz2oW%)µZu4yu=ϜvM[Mp;"w8D".6ۿxه>`2Gl XiBRq\ 8*VG#1+ʷsT[EяʖbvLnS+kc1!u6: #)<>tg7je5/g)!=%)ꅒ\*<~Hwh}MT͗'YAlJ1iiApgPN &XtQKW]RH-IWcgYnC5YpggzxD(DNmPOw9QD- M@ 7Q%)DV̒dy> У"G!*0tTQLϒ@Qi%rb oZgC;-4-3&5)PT|0$ZI&)$%/")X 1Qd58F% i: 3pe"^yˀ`R;N-L[#xA֠8fƻ縧ĺ"j>@Px"Q1#S.  {9VNekM'4Ls#MiDS{-gJ쬘 g"yuNkCr!NIr|z+ךlk|eSSXqIs ;lVt |u)BuMn4-4d u䎛;^ӳұm*sԊ s* 3A*PqgpgqPYJF:oO)JP`F޸,.c⮲\RWCO{$'T#0~r~ <~RsWvjۡǹ_|..s!ˋSU.[JGB[u 1aZq$_)~}1NW&FqQRV"panL s78?UE-8.{ VT6L_#6;_t]xS;3뽍w܂-5KkЦvv5VaO]=BY`l_jV>ԜR %`RZeBDԐYQ8)U1'V9Q2[G¯-%To'45\Qovp$?b̘,.L!*?pg0e F,Yy zTc &"- ]ީS{v<Obߔgo-IZpkM n|U5le5L,*[4?a^̳pпj:zqxGIdɴ%XJg+IŸNztL?UW^{8YJ9BWB=uDmT{u@bɻ@EEn!^ H4&+$ y:j>1E\OepԚ8u/K^W\٤;^Q3/&w9#'m!ë3ex}3AДֵreH9:LpޑҚ@/Stuie8I;q}w8V=HDZ}((@E y:(Ds9! {͌gܪD5D 4BA,3"jG#u o$S1X>MLTc r>I: hTr}1#^ Q_*jQ\;(GZ@2>(U4e(]DU: †ZLOgͪ38plgWL%Dq|zrctvH51^y<ZewmKO/ վ;?-=83HLj4$dd\%k=ITZP 2EqdlHҲ{ t !9GV!qzϋ`:$X1@JUܢI J#c1q#c9R KiƾXh c!XX(m~"=H|TmʎFw/>2b{`A1 H k q2fo*&jFZt0SeΞ pg6^ Dз#`& $+&5)%vvqXq1iǞHYQgk4ނ{E APEQ0Ї5ѫPA"Fd&)eƕBNRiH*C ы( bM}+$1&H ģQ[[>= xx(XL?EDZi="n|VL,vu!uӒ}qQEbWq"@G@'f]tM*#AHS&pCwҎ}P=#(–Yd^x̑;=mCwr> hw*!*qn|6ulTS >j\͙SU؎C>Q#$ @p|(&0M䓗MI J{8ބl& >FC&fk&^ۜpvk=dllrS^>^pp0R)ox@2 g$ނ"1"kS u~ =0<0 x;" ΛqEt)ڇJ 0gc(0P3y):& c0CMК3ə9"YHl|mb;$BgM ./kdl\ݽ5؁-6w}Dx`àƗk3xH21kNJm5NREF %9 6Q-snt4ea2B=AL׭e?]!.8~bxNPM5Pggp'44<gl'b\s쭾_/FXoR56+(A2&ԽHQin 󿇐R 5 Pչ3q4pF)ȼT<!8qʼnOP!O{N(M$]r WcW:RRrbL f1b')uz>Ns@/֞_AMo14?_ Z ȡkN w'v|+t2gOߞY8nCm0t|IsQmd(be#Dn S&{ I(384^ʾvYĐMPH%-M,=& Gpi(752^t x8 S[{?oq6#5HVnPRc]ͫ6;F'ciYًKq}D~Nlj||b4_^?֓8s&2?!mAG%WޝTߝno̎Cy^ .mcmg6G/4'ƹj07uFnG>hș)g kemr=Na]m(fkNSoX*l9T GT)['^(h ˀ%4G ¤ nտa FG.o?gc(L=XwuG]j~袞7tr/{}ѫJh*!Ypoy6Od}W_u'vz>C~m|X3 dvz2zuw9y7=ֿ^~듕wM;\:ί~U>P/>&e@_0ӟ.'ex>e_BFRY}1 }뺯YNsjE{sMc6|әǬ4|P Cw{YNa‚V/t?7 CmYTr^`jtXn!rյ Q0z{l\ofBe^}>H]E5k]d\vmn1nyud6|ba¦{NEe,Du~u-4\Wf>b{ rJX. JV$ 5R-) e_/ZVu9~PE@/x@ J%FA4hѸ)1)4^k ጲ(Й*^@N:/hn[XTH岼gſ(E[nm_{OY2 \XzՁM;ǧ'Vیra(O_{9ug9wv>y}h{IEJaw?n?{||2ΣfmoW_PW"]EAZܮ"b՛] hw)w^6į;UTN*xuMhrI~YǏ(k>8Eci >@MIŝwJTç r A (D.TZ 8Ɵ5ZCGRBv9_ٴEtGMru0b_9k>d PMupq8bLf_ʷǍ|v5`GWm;;zo ]1vJYZghlaf;Yvy_D0d1TmO^TBJ]Ja+yeWR~:íҩy (9Z*O ҕ}gj?ZB¡(IZg#!$t=Oyk#ؐDNH"QY E^B\YJ%Tr " a\g1idpETv1q9d =%Z0ŵOC_ᜲ8X=.y\Qb㺍|E˽>_t^3_Q픊F@Y!3Jj$, ղ@2AIeE+S@-\b1XoOʓȩ 9s]Z MT}:))[,Ikƙ2"G B;4aI0 jmY F (K^Ί5vē=k.N3nPԜliGlH,&)$%/&iX 1Q]oFW`~T 6X\vnolɑ8A(Y~Ȗ,"-fl_wU]]] t=5Ch |~-.#ג,g>T&$@V^U@Ni x% Y)K/c!tIo:~}L-Mi4ZQ(rp~ɩ/Iը?<1Ӹv9%]|S;i-Hn8\Wa|dZd9p8M|^hѣ+&$^G{ciq|qՕњL}0; 3tXTl#RTE/jiPqL` Pဖbpws~ʇv޹f,:ymUU:( >XKA@{VP(bYӰV#3Kk9 BQU+ŀf6!VV'7) DZ^l ˑ KqRP$e* QX{y鵋uwԎ=衹=O,M& kmy-WVƴr:X\걫Q_s]M7/Z̟‚d_"i&_uvѺқɂo'sy41#j =4yT̰ 7z(n>\>{{On~i42ZkJK}K϶-7ߠOg7u|dOvH_Z΁Z]*F\PR+ st0dRijTO٦dd^MVk]Z>Y>k0+em*T[Y eUT2Ypk%FVivJwk11yIb5Ͷjm0ޞL=My| 5Oa=#8Pа"& @?-QwG??.+lS|< 1OC:5o~Z [ϳY堙RQ2zrSX'܀4vV;G'sMc* d&9p! 
C7ec˼faDWvLgC{<„4W dD֖,Ԝ e0!Ԣ~'ن0ȫQxW/f;׆X|t?:GyzOvz0"o,]bT~؜#/]y<߃YKXlu>Լw|]71y@׿oq:o8#,G^ܜ5{]Z?g/n{gܸ9t]%.4d/l!{!$hP4U_ sJ:5[f#W:n `lו6yv=dfս٬āhccbNY"D53FDRU1F[Dp^'%EfAGI8gUZx ̂Vd58O/gj`ݼؐl\fem=f4_!)Μp S ^Ey {ҥvB}\$`]୷AJuB9fCH* mm Ά SwM.}=y͔`>1Y4*Еr]!`Eo,CƘ tuta/:Yٕ0}+Bk:_Pa2xte3OkW,]!Z;OW@WHW d+"R{ tjJn(zy̘bgVh9ۓuJ1[Еꩢ@ڷmoྀEKtEh] Q*.:DҬl]ʑJXenT}GNWbky O۫(L(y?2_)&ټhw©iPZ5Ҵ\>YXXD_ju"Ftut\n ]\BWV@PʁZC {CW5\"AB:D^L Tl+#GtL 2]+Bi:D~H?mU"Zwv0ɺ"JnuE(l(z8fqnNW@j;V쇮C: [ tTsbpLcgرU3VuݢRHzDXAoh۪]iB5r闡iJwMWӳY-bG)YF4@*y9JP5#/x&Ogy`U}qYiU"(W~H􇣝*2{]gwgӸL?j[ VN@=hi6)A@r68vF^+Nev'GkOݱ̛bj2,v!WV-oyw Vy^fU 2@ƕD0$ePz.Rބ+t.I /]S *U^U=SE8W}qi81Y Y*QPFkW j'!A"$f\1DOq|dBܢx Al<9SOhQҘ'**u5/ͳL~te>'gPa JIQd.ə*q_w*h^Jia[E#4RU)SRJ1NᓨTF\ɀ޵qdٿBN[,] l/c iI-I){(yѝ橪[]JRB0'ZK}@,XɌalJYhl&ۀ)J#I>b֕n)DD0S SHeqhBٟthmsMJ#TAtLa03!Mާ\1ИUFҚw-b_Ț,0Jzxx/dp" Au1A>"iw>fhiV:uH )Sm |\&9 AY55ksHH%9ʂM/ dJyzs(|4 AJNjHH I?ҲchJK\0S6eRHEm!BB>im-Uल8DQOJ=i^:gICR [A%*-6JR`ƨ%gE2 o׊gM!(dGhUoTddC`JR/ޅq*1ސ ," VTTPtB[Z CshR+oݤaD)R@!VMďy ڢI±D6V:؜b"$l0!-CHL%eOV>0fs`qHJƬE6,$wC*ZeD@Ww@AQtU:Uc`q]bnJ Vor3E ] * $(l`Q&T$2z|d, |DEx*dM'(M * _tk58H&`^Ou J+h#1wl!ՠPwSA02P(SP|k<(=%X !@YP&9ih$mfLV"˄<}n*nMI9$(XYt0w@]L\Ρ'8TR 3CuUkp) 9mkL:4i_ J-7Q;)ti7 U BѤ=K%d~CBY' BRBA6&b.5TuAd"F_\l 1R)NվCEt!VQh8(l,@@Hv'UP:o3V7_&M23u&y_{1!.E1k44_#(*SN.0D!ND];v&^o5ۚCm:@G~W2z(9Hmi>jZ. p1%Hv95M %tB\XK(jIR|$2LBȼ*c|p|RT+g0hZ{*EK*BzP:G͐ B@8Md!*T?y* l+ɒ3.$S`U0Ov?b>wy2W)Kb,oBF"G!A]JS0mA^LZ@mh= ˅"8Bik ) zc 2;"`e\ ǴjYk{eMƹtqfQ:Z 6Q\Cؘ5EWB bjSѭ15 UyHY-:h4v Ƥ"a[N*3B*=n(!/k CC*樇2tyD0CkwŹAY.j0uF;(B , R!!KHJYHqXoڬ׋afV(o7`E^QH"VR2P$UAU;Z7H.# U@ SBT/2jR%:#!SFfƳ:H)-й'GҥH*8QN193UAkgΘIиRO jITAR䳗g&H b2 D 9k듵@:E'{j~5oj+FsA D.L:JVyàe;̀|" =peT=?MД nF5>dN֞ JOQ!,2T*`$In5 a3;iShwԇ sȎI8Ьd|I8N]}橓:HQΛګеԟhD;*:0B [! R GRe[ݬMvp69)8[[_BIY Rp7K6Q,S 3;y&I**Rxq {I_`7[|9?ŢջپI/7 R`@oz\_|jŕn3Kyh>׾puNMv}CF/n\Av8$cήb6ںvk'6$gΕ<'G*L_ZvJN >%'R'Z-( T@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v@b'; Na}JN t@ޚ?9̓w zN %P)v@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v@b';HI ~BNq;3:Zt|&гtgN v@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v@b'uɟH8>egB':@$?v=G'P_xc N v@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v@z>NϗC;ܻީ׳}iony}7\kr} SS2.7tK`xK'c\d%K)6.=zm& UUܩ:i:]u3]=C"'=%"'Iؓ+dzMO2xHW:)`N<UGS;gHW' ѕBӡ+h(ZOa@HW}u 'DWoSWSʧNW=ž#]ݎNXҰ=bO`Gyi mp 'CW;T誣5O`Gi՟޾A+7*Z7ס߈{bOWЕeC/Q&SZσ6^_}塛oݭݷp9U0F =~/jhAo.08AbLsʼ}CK"|u7"r;_*v>eIwKrY^"@Kw?_f%&(>݋U+.s|/ͻ:,IâlNGU݋nitY4 B,muV"r/?(Lϛբ^7qҰD20)5L߹a/H=a|_k P"$rؠID tSLy hZSP%C2) MuL:7k{ݢhخ/Qѫ}L>^aBF89f@)“lC+jƈ,5t$f gdN S@ZAC^ESC\䄵n 2ݻ~˿-w^vڽZzvJ=Dį=Ҋ ֐(X3DPJL2CI&8NUϫx›#a~Xq1Z?5 oLrqwz؇bBH~^_b<\S!8Hc3e};\0g}=nvz=vLMNrR/Mڿbq|7;̵~JA6PU[_:/=:SmBPD8w/sN>.C Dab|KJ#i?[͞ TvH9nn~yJ×~#Z-.};~۷sb /ן;/\+wbsvCѫ?ڻ?z6_7_>nWq}.> 9n.b1x1)Ұ~~w5 w1B V9Pe}ժ<}s\jJ' E&Ō0BC3101|b:T}0)Hrh$KP LLH^~M:OJ S5!1L~bX'ٗw˫ |~{T]}j;'Lp;i ߮͟xD QT4ܣIg}-SBؤ3<3 ۷(0٘{ƍ~ܻXx4CmmU6~T OkdHʎ~ᐢd! (y\p0`F?l2$3s"F gE ) j[vC@XDVK5xnSЅf> ╫@;SJV>xS:e$X𖡉Ԓl%B6jj=Lح4Q20USEd>@4^2ҟ^@Nkb%:=`ypx8حŎEs>9dvfb2fo2ֶZRrOLOwIt}iwKycg{l-Jr@'_Zk·+&L.VTdc>?? xvE]~VZm<ժ>c¶a+XFݏq:Fu_;,+^ݏUڄT 1n4pב6UڴT%۵4^|20ӿ_-޲WC~L o}KE>ߠW4{EGq=Mˈwbyrˌ$K쯳t|_gr*I/H5G}TJ|DgTq cH">#?0ŒYLam+tcwqǢ旋ߞa'* ('!bH-r=f띆H&Je ;\["rR"}>c**Kp5rK8l2Y-'p^|=_ۑ[l֠aW3YV?Ez (^LB}1Zԛ=*7f4Tdf:rkѐe 0d!6d2߸R\Y#IqIvڮ@pb ;0M@]N3#l.^.!R <˴MLH ٕ.]aIQU=Y]ۏ0<+x,<*J•$!KyJL,l0̂>VU%Z&)4{EV/Ѷfl[zWڄ.ۮq,.+XP53F!#ˆ3+9U|<0Om6leFUT ) ҺD$)M6f:C J"b{Cцή lؒnnhWQ ׎bdgMg⮨-6Xs \Wn뛭6؇=EQvhklXnmLʲ9Is,be͈gnWt>R #4FsDn?ͪߪY=?ʊ~,UWY3}Om"A8]dfl#e6b$=wI;m]133tGcx Q;Ag1'1+!-+@H7Ԯ+Wftѡ'c?Qg,UYh5: )ꍙ7n-qW p.?+vri+;/l9 . 
)vZP"R5ɫ.Vы֬+Kn5]nx}"O:$IanxfM&kbʙc-ȉ#a"1AdE3dhI9++Y5rV~;!G3;cҪFO!K v1t4m!9  cވ(RL3b \ 9E|x-)3ItfG` Yt$>WUfA1GnɂJC=ԪbI8'גȐ3I(٩ h 6JHJ \:B8(4W'4wNs#CiBSV-CiZ-g*^JUF<&ɦ4qMV#B]RSBEOEȱg* :sK u򂑉%]+cJN.n%>e6óɀ߂6jQ!VynbDlV#g%z[!$})mf*;I D֪h60{%ώŏo4ݻ9sRzgНi<)x @MG5@FMI4G& OVS=HziƱꝿܪ%Ԣ}44ע=->{hkW>G'W vcvn=z&H`6mRʦJu0@*]{l:k.C!eOHB]J\%zk =ޡC/Je 9 cjs`!)I%9͔`.͂,u$YK7! rY)Q'$!=2M }!ԕ3xűD2&yQR3}|"rJuȍVӢhj~7:% ^X%u0RQEV%ÌJCB8-] ,RG~!k_pVq pgr*J= +qyZ4F+jx`lfnMiNr ǪMFZ{V|е?t=`o_F ;l{LH?{ƑB%ԗꛀ\J 7!労EQ=*\#QFAA(irN!Q]sZ#bSrh$H˙2”n3k#G!9Vu_Q]7 0D ׉I-ȣ MLp%TLKK^tJhZ){2$FHpT 0ޥԕ el7[/?ԇɃfJvϚѧYھ n <)E(-Z{^RӪ|H CqIaeL*eg}*s)5%xlmm* DO6ZeRqA$-Is-=vI!5; |Xؕe1x8*I5$04ui%3 Y1Ka;%$ɀ(ęBuFDfub;e:e".~w:;r:8p-ՙ+`bԣY‡bu2Yf]C6hu%.>{3ٕuk4xo7Y9_F.z`ܰqOH qyo+gf E>b7I TtBKSRl^foXGk ӺUEބr+} >6>>fGZYu&"HJ-ma@ErRn'A…ƒTΊ0ޤnͥnc 2?~g.k ]w ld2'P 椕N`HuL\Fަ.nTn=_A$#Ƒ}'ƍUBVj*K\T"Z\١TfH_Ih)w&5`hEP]ߐhs\ž5ل -1H~>Xe q_R1KAZU Ch nxk\̘5>tJȨ‚!dH>찏X18SR.F -r% }9c H0: c,U ӖIJ<[C3;BRQ0Ey@ܶq2͓hluzcd<kzXݍiDcˏ5=^;bzx'4JrL9w~NϣŰ3}j:}q~HG!) 1COK&.[24ǣ xvǮYj=Uᔏ3UDfڐV]M7­oȂdF9ׯ?}i GH}mVm؆[-,:GqO)i\9'M̿_s|sFQף[mZ|z48/w~ljnU18N'e|JwsmWGFWv9wؚ3qמ g:[w4BY5iS|+HW兞ޜtvO_F;mno:9#tbHy6' 5gJk&^v:=Ay_ϗ߿O/pa<7/8H)?IPjg $`Wl/SϛN[ZZ֭^\ &9GKoxm;G?_}~~8+ ,Wt4/|Ĉ.$+(|>t:C*nTų"Ċ41z)脧b9W_Wx+B!SaHRX%xF/ “ik{%=O!,Oi):R3&Dǝ.+iIHMG.@ s:L/[:m=׹Wef(s<ݪa~V N?ZAbF du{z$< MA4@ vijRA댞"->ŧ\X) c䁩φgR5gE3vEe.Q:+J+  ds 'Etu&ѧQsyŵM-bwixUS2y|Pغ=w?gOBtGb3xޛE%bSr0 knYX*>VM8\m9#-~Ǩc-"Dss2X%CGOFF+\қ-d 8\%JNngWEnӅ!U.$Rj^;%XNZ,u%gms.|`>1AI==h6d]>c~ۙP׳?uU'fW*&m&T!X1`Բ! J7l> ie"\`D5ҌFk~T+),\ ge jpErW&{+T;P^y :\Y.% W$`储&;}졁qE*]" ru+ c\\QM0HjU+Ri\}=2zszcW[ žԚ=N2zi W$טZpjT 9 qշRף,,hm\o01u ?33z.g&c9暴wL~5v)Z}[,ӂjUԃi*Ie}4tnbZ gW$ prBJRٷWO+r\`p\\- Q}lkW pu8RcAU+tW֘re πpEu=ɵ\i`Ujw\J>]" E+"\`P\\kkUBW\",([U0'$`ЂfcWCĕeMPa\\k5BvWv[ofp+ncF|{v*z9%. %H6Ia- Z \N™~uO<[{U'ym]q>^M*c6o0d K|:;ƶo8>$'yYc c&.VEFTЉh[d@^EzE/JQСҡ^wT`MP5AD v7j&(ꦔF./e7U|.wC\[ +RӍ49KgWc[{g&y';٦9:\KF4$K h«xM|N7y^xs;]޸S c$ɔb@6,`x5# $ZFHs}Y@adGnpu6Ϙq|d[4y"CWˣE RZc)ekZ(?_hG'y> Tx:7Eߞ輛Qt9%??7ыF TPy-賆<;~v7-:ò.M2f$ ]S ^ZѳQk- x|[<[ icQ=dq@W9eVξbGRs6SZ۝GsrybY`ktzڢ=ϚwܬoGOii}vr"U{WMjtҮQW+[E}Ü! 4X XCp6 "wh2R^s+Y [l!z'(Wd ~ؘ2ꐩ;TZ_Ձkh&k=Df>v6.擘#a͍Ke^`O!LE-ˆ&-M,,zwǻwQw~}e>l$d-H7edT%)1~Լڿꋙ_03_}}4-.3gKU}i_m7Ρ-˲Ӗǚގn׺Cj5w]S!#V׎в's<&@قb0~DF:F D2"Z 1J6Ҋꁶ"W e*UPF8ä<(YՉcRI6"#f^rrm4DJR0Hp8=B 4#.ۯLNfJJ,0lOc1OQ4x.RB\&2F"yBAxgKPX~i4jKS5Vt,+sl $){H#AG'#)Nц!QM,TFH Ml|CBT*ލ}<]rD$Tj5J5x uD< ur%v[h|\ՌQ~5% YUВ P-EdIj{*1211dTHbNE)9LkZcA٫=ԲC!”C]LnxfQ]MAS:7\tv빽6 ViW#‘6b|:/.-d<>-N.] h6]W^\˲~+:&*I!o=p<=͆zm(tn/>Ib &ž-Ғ.`V{Xt4+j&g~%f{xbJ&Rg/? 5ە;%t'?*83M#L{]yv7;{6:[Oͅ7}mۛI1+ڂſ62R6 ґwCFiu]{ pLqc( Dh,A:-Qy "$ybm0;L#?54@]4 ~۫Kx>P y 3//qE9j(.?\kA|ҿm꛹t48Z5icv`e!ی+5\ Pց嶾a{Cs7!U0z BX` wAwᤗm4qAzqaP9ߪ%k]_k𭙷-pkB[Ci}%iȞkGaqZYKs? VZW ] r;a}Y-IZO %V G"@cuFHrQwV=l_&07mK59.:r`ƒ+(c B8QS@%ղ}lW10i-y H%" N$cotvD,Z(ց_ w_yl"MS#hzu)3jh".% Qԭ@s] ms '8uV3g@%3 P!B!W!ӀfҩDIJхRZ"${AVnCCIkJrMAH,hE \(/8X%pG·@R--LMN{{kƻp̫s+̊wOgaB;ՕrQm',^n_MqFg'5Zws(6ha\DV(NTSyqH !CiQN~Њsá*8Ëj \-E МrQ%u 66ʖx6Nj3hּ޷cEnE1q﨧ۢZ ٤~̻̬͆a]k]Zj6N_}qOZïצe]b>;Y\8ZfD}p[nWc)FY>Bkw炐--m- mͰͰfwύͬ3qp(U|<^,Z98n蜤ۇ[]vr[*ԬZe7,e$7,}2ֻu+(ƩjGi1nFV)Jߛ PxxSf G`!` IQI _~{Zu4lqod{z1wh׊.kcv% ??Uu4>aȁժ GGlv;ð/ר2rEU8 1}@TَK\]fϖMoV}}tMFOh#% zNƈF)ǂS@FʨbYJ4{_w6,U^YKph͎n ɵ!@Rp6EjnArủ-Tmb% R tө`6<}&O\3;z.:4 kq~LH^u͔ i;_'mREȓ%H/`2ˉaU8.8b8']$h5*1;Jӫ@dFmzջ^5" \DM"VR/S0F$'^'>: %R3tpeboE༷T,-h"u;˝^iT3rp!`5D~NO"B*3"3Dm($$;`N%@~%SAAe9*)LJ"8ԁ($Lk3R~0rv]IOq<>E?pNI%U9r<9=V(l%kolCT+ߞHLeSJ.KM1 e(/(#XbvUF$GSBv阬gA4) !*C.Z3vFx t ⌇B۱.4{](m6ȸ0nOR.0Vtp坱_Xc*&$b΀0\8WM)H)Q47V:tc, O95{.2½geɕJx%f6ꄶ161aAXYHЦ왮ȹ]c(;jmFڋZ{`p3tZiMNJ`,a. k!(PA8+2^2 !' 
H*B ZA]- kAT$sȹ]F}~p:`W4bgPH;ֈt.%UA) ]jykD3s>duY!ă$&qR(nZm RpjY`(@l. ͝Y|ȹ]#~<jz*µKθzQt^/-jQ@ ֠գg0 fUQ"x+]Xc#{zqg]qCX>0 TX{̋jgymaw> ܤQywHg?~GNh#`7P9R38x y&v`=As|@/x奫/ g ex:ģ(r_p|_qR.zl-cf(5ep,Rxk9%r|ԙ}&" / C^CVXIB^N<~o0c6˳ƝX:WZ7,ݪ_RMEZ.XNԲh0>3OGU|@q˫mIL+mɶEȧ0f L/3x=F/_L#5ؖܒ,s"e%Y.ٲ:Qb{뜪NrjşzYJt|'UhAp'H.d@T!gNU 9Ux{iK}ۿUJJWcg}VVW^jx1q]6 &eF2 ,Z#ՁHO[W~շTGR]m9cY;eJ`s*E `<$2bFX$p/3Pb$|8l /l+.x|jհZ`dźřJ _NHb`JD+U6h`>rIOjF9dNu,a:c52bfKm4g2*k$gHg`wq M07┠%gjvO;qzʼ^d!Wk q|7GW ,r)>}`tH)J&ITyu, ЂzowBt{y, Co01去pRM q8â$d49#s ,> tI\1px5A$7j.*ݭb}7͆yDxjnضd,c0+*Th{.h/hxr"qg'l0U+4j~8gTM|x]veH'd0&>ؙC fWaϱr'h2]\y{VO/xlzVJn 6Ͳku~WjU|VӬ+5!<}=\tbp$0ɻ=@?7aTKv^Gf/M_cL|沟omoq8j_[kS~Hf2:mvO>Ԥ|?kB^~o[,ZeʸJkM+C:p¹h:Sq}ۏ7s{t26돶ݩ©h5_ч3/rtr)'ic_}MڃM٠N?e.?\M~gy-٧ ty'GKC$BF0kA?Ensf^~. |¡Vf|];f tdK+4׬B?F) r ;em-l;eRڵKw]Ζ_vN+hD!ۃb?˙%͒u CulP׼ =0E55 w1$qcl8r=i;.'W5XM㛆{)wfxAM{Cyg&lMyœ̆Ȍ~ZsvaŭN՛,D~Ahg. ;LJ%;9@:%4PJ$Q Z#QҀ#xvOi⎇Th7G30<56H6RGڪ=*7D嵆@gF0VۀF#Ч$N//PkuuY¹ ƒU4К?I}`k)5/a1Zd]h&CqN d;%=U_BV1ũiهd?Ӑl9?2H#Y'"">-B~ pk]V,hSO<;]b\X>fIZ.Ce/MX6~屨[\Q@kTP-'Bm6rt.QoA#E)Pvh(XdD,2'"uI{ST)ƄܢR%Ola .&f ^Gi؁>hyTbv5՜=|f) JxcSdQe}T9*Aސ3Ǟ9@cZx H6!ĴD%)/ADH+);""1M v@ :g(@,J4' A!F8qg1qdMI'Il8OfulB/ߢ R3yqg4f޾Ŵ[J7rh,]ua3ڼ =Q ߧY:w֋,f n6^n]zq ?ڶ煖C&g7^4ilKtㆻ\ّxᩗƮM7];Hި։9%^{Ѿh|™u.xtvy_-?׼E Cz*#>G]yBW-g0Gxn- rD:)8|r2mFf,\S #Fx&s帧X:^[ino$}[Y}Ngqϓ 9L:JFI44Ӡ)Z蕷6퉸4OS&W*YK(viQ{Ŵ$zgwWꝥݷ]w~ޚs+{4*+ٻ޶dW~;`g$A?ceI+$[MQd[K- !"f=;\e)Ȕg W f P`WY\8 pYZ:tk=}IbJ?:\K`N# W)~RJ_pEWM2#ud̆z.b~euR٫Ox6Xɠ D‹%Š77 6;8854_C(zf6ga: ,tW LgiwR$ha4!JxvZpŕP*KkJ Dp bWY`#P\MP*KվURFs+&wz))v0OVeW)0qg]ugj~.U6jp>̔R)HQZeBDԐTAӉcow r']K//o~IN:XfÌ0R' fS̒zb dj^z`18jɝ%{\!OGEU0O3a6x[..YLʺ{oFd)%m.KWh3Uw⠼Whq+CDe%!%ox F߆8:{]zÀV{TZ5Tz9F0@pq@^G%wQQR2^*$AFoq.FIT&H3⩣FksȀcJ*S7:C} Ñ䬛Kr˼>tHKfCwYlo3L!~aTNq m,ݡݹT :"[9_c#%ϔl#rTwAwjW$AZ%,aZ$j 0=ha{7ld;xNϵ/!&xP2#sa<hk1 YMY4p֗AբOZmƯ'9sˣLJ8I噲gx' '+~a7bp~p.r dB*O FLhAEH;X@:"N!z#IXg(uޤO0,J*A&aє9#o3X5'{x͂L:Nҫ!2yZUv gl:lYa,d,c0+*\T=\[9 6Q%8gY <Oo0: -X=-<5&JLXvkʵ0I Sr$!.y<q 6_3ȓG+pT1ŪowȂ^+ִ4<$'K zu|dz'h2}>iw/q)|>Ftzx~ﺲ{LK7xGKxQ?̌C_e7_~xÆIPG_K?/GȳgoaCw\BFڋש3zىÓ)E0kFɡ~nbf/?~.3&s~,yhA9NԮYRU԰OH+6"?nS^ w,S~%/ft"WjqޱYw6{ήjUDX$=+Y g%/nzzOWtmz[xaE;MU;Y&6#͖M7[fԌc/<l/^˃TI Y#XBTI5B%) E4id{_N><='t5C]z @ R&ѠjM9$Qy6Gi63Ee-5zyeK[XlӍ͍il|ҭ~U2>ۄJ7 9Y.ia(mp#7n6t.78qe)F*mVR;R%_7`[(CUJh &J }*ږ-9orew?lˍ؎lˍdi޶Θh-&^fɨqzo4 QCJ -mYkFr/V\KP9r> ZTk#11㕎'FhiX1+5{\UgmN'.[ZlWNf4e"S.r\gkT6G % I+ 8 .mۤ믁(Ǐ5P6d5;fr9`b3#RLFGB@bipZK?4- 4*]NDn[mknGe2Ɓ56E6U>հMRSx,sT&jG'Lvp`L,EA8nգ9_E;!jnX;sqvGؤLs}X XR2qHhMDPB 4Rh^%e|)Qjา"XK"J\ l$""6J]3m<ɍGɚ2iJRBN"NNFri̎R(#1)GE4J"ur)\І$&b;c$:'-JvFOZR4g( Ω'LV mRrsH^H|nMLV.-;z,vSvT'_)b{G c\/8}u4M^# _G n';m'rW|~q;i5ۭo{hl'Ym.%d.Uԥgz7'Δ'^1:zQ1<׼BK ]Z7cێw/yZ^L)*p)aRae\'WK1>*1d&gqft9{7(^\J-Ek i:krZ&zW˃&>cL̘ gsW\wRDމh4aә>W&Y?9rfhV666,4*Y?j贸V*ZO_;Cq⤣ ŅA,Խ6[QmWwmbO>nq JC'T뇋Ev_Ncas1E15 ڛ۱~<6uϻ/Y*ܘg+6ŞEܶGfm! _: O'xV~3t)m-G0R,TVja\~nSA KH!)p)`s*S= Npb.jg(aHGυV()5ȩ g܏ܙ!ND$4BhShy֖/240(%9Ɩ$bL2jj@(BFSkUFfoO{~XS8>k0X4b6෭}HAnⲃ?J`TYFK`ڗ@-L!`jjm5=][oG+_,NFKMro@B_%(R!O*KJ5"[tuw7U*4$^ǘL{X'cY; Ms7Q,,lQD )@.RKd.s>w*R a1qx?oI?+唜jJ*w4ܽ[io4.1qLH0ilnn[C*i:;l\6ӊ8q;Z1]?oCZc1FT,7 2'C%%㘲^%AqAWk!ƒWZ~posUV W  2+ Mq%9D$P eVIY"銜\|HSNWijk{gN=sBR_{ᑕX~ٶ+YZ+gQ~׆Y+L^" Cs\BKޘ|{fpYwml:I(UgN(V<$yr A94:ia;"i^:O w! $4:Zc5Vaq\+ٳgNQ%s$) w[u@[SB@-UYh .=u=Nvx}m/֭ Yz+p39}4R$%)7G4pFZoa[o7Z/!&ɐkԁ'x#AS3g9'c +HQ م e^lZIIMa"AD :.hYbs/1wB-YGKh&xUgqWrЙvQ}lX}; w lLC5դMO.wb/G??9|8 cقY%R`iU<]M6YMU'z:[.hr3ۍWnCw4Q;ee$[ Iش̯'2Q'NQKRY*^B\6 FsYm/T?Dz;}Zh#ijH A=R"_%8UGrzAjcm/lA^Ub3qصNtE)1kfM;#*iI%MB#=FtF8yLjyj1g`[Fu>ȿXI/O2=ggH^tz)}߬jv^'mGqi76|ƅz}uDQIt%xXAGɌ/Uݾtvb-ˆAL ޢE4lo sܣC#SgkؖVz_=!) 
lJ]胃dK&oǬ## 36ה8; i<nL;vDmEZn-mD掑gfXK4 cgaF)Y wP5Źp-䬴1XŁdF.b6bQj,f|cT +Բ2LH!Z@dHL/Ho8,!6}V1q6#⟗HSG3Cu6%"4F V#W%GP(Ƅu2(2M>euIlJ;vC0AbXy7uܪorr 4:q$6{ f?nU_@H{tS*劌\TJU1Xm~%|-+Tj|n/we̱ A&A€ʀYrLF tiAMo}fɬ}&C4:vhLuʜVLa<Gr/4(7$hMFL@rBkAsJ|0 duL\Fަ&q$9ru>p d[{2 օy(# ?($0㩏R},QZz}"1m}7Xjƕ="*O*\UC"p-\ArZH% \رUáUR[N8ay}I}Em=ӤuiR9 p%Zud-j)඼/ܹr5%F;|Xq9dD%ԙgLvџo~3'&M;4ɦ|3iN׮Ӈŷw7h`(ۛ~ /hlfNhNHA&zT>$wH]ݸHMٸP`jg vޢAqURPwr` 7N# -pmGZ(ru,"Z(R.w7ZB1EBEQUu4pUĕGWEZ#HJY WoV#+رに"`WE߸)R7o@1mS\iY΍ZZw:?բ/k{_g?sRJ+`8*ϤRJʜ;v:^8=X},,\ i}7[͏:?1: Q#R)0L `g`j.! jلuց& xϟ5o[9lf>]u\>.;zim7n߽7\c[>( /{// ξ,$7q;dK?vN[m ZŮ")o|zO!!ueQQ,W,.ԷN&pfT sgr"!`}d,W:|cr;Jz+$_a~fieQo|jԲ(gx߹^YN[;rfqtY'o~Ut5};|p _.vyIzcgl..= ڃ_R:1'VvF[d{qttO?{(mݫx7sJ.V㾏1"cDC bt3C돋_`t>kƽk^׻P5,fU:-f\>r ۇzB/GOgDxI w)^i F!b H:)\!!hzrYtc$.,xfToYP$%ǜ|cSc36}~XUj -%G1;PZT '?Ά-Zh6wΌ(~ [<4` :MSHkj%0щR5..'Sv6F&0>(騷O f]e5R.Ph8b//6)9Cs5,*3Nqҵ⋍1d U5@0oERt5I<8ٗ2;!cQ?~xX[JwW!čjIsWUoXD_o#mi>jKB+w2QT5ޣc٘cgk"}4-C*lcI@Ȇ`,GgSȐH"a0 fggu!X0 :huϸq'iyu~=d%Xr $zUBœlD&VGS 2 Hȡ)1`¤O9B UDJt/QBV|S#4%` f%Fȱfg4fcх;4쉬-P2jaŲ-9%K(J ٢T0q6vR5)w*|ػ9MK4CW}t}t3=^ڭȮ詢#<#Zy6]}S#::t. jFڐХ:>mjwt??.YV./+/hg.ho@;H>4z;i`x[__9>d3e#K&q6E lҁlP&%k   T!HIX].D -c#Y$P¯bZkA.ǒX"c2$[Ě&\8!(\G ^&rqsmN&% +B4PG%X\aKfفs1Kcsb.> kgJ2S>Ԩp&$԰! &DX4J-Y¾ ARn) ΰ(6ԒXEoͨ ȣN)VFzq߲z ӕ ]^]]i, rԸX5L;25=drUac(9рKʝjSQ9Fdc%|,t@zS.{*"ꌋ48H36v@6vVK'#{ms"h/ ޳4/>rGc찪܈@#`jfr6ܤ8U7SϾ5oΛ6]v=3ۃ]lH"@.=waxI~뷘?DY s)sI$)j,R5v5qѸ&Z%  wͽ_DZ0dlOt2΍kֻrXz}^{:(<VRm69bKt *^ b}Hް4ت꫉c*`Jj}:4RZ7T Y $#**G_e-vC`';f܌JΚqHͳ56\Xz`^y> .6(6iqϚĤID RzHDZΆtH$[*PZN Uz-I7uR=0q@4cW_B}^}፞{dm_qǧ.p]ItL*4y56I9ԎjkrM=P e=:cqn޺5GtI)3T!iU)H=0q6{|z"+P0k;3k;3{9JRMFi4+J8naS-m8&u) o!"L"o(0kŒѲ(=AT QEͅ9Lp {[1 O(#qi|6ɀU9̳zDB԰s3Lfk0azDV Aq mꕤ^\~&K.5VM| ;@h5det0q6{o~qqտ:iɮ~E7/޻eSXmHQO4W85)LQj/6H%Jfsv`v{pa gk"}Y9rB ~'+;k2 /(I[$ُO +\ 'B}gz`N:AڤAh@lB0mid>QD;4Og/-FS.OWe=pAmRMf]ʒEL"uI_C+LQwR߬K~.@Y{X1zo}yE}X|տMePjEcjAUbp W }no?.LkP=9?{F_ ]%lvl6|8P?cx I6V6lH1LOwuOկ0ӱ04>})w÷ldJ)KUk>odt.u0#w&bۭT l_ZS)ĴkS'LJ}#LgTΪi'~KTUwޕ7^N {%H$flqb;<Ɩ-dII7~b-i %/odY3di3Dȥ3gZ`(Y|8k;Mˇ[m:K :XZ=Hj4Tialm M?Z0tEx⏪ޠGo~|6}~~};x&`wa$s ߙDTܜehihi*BWӍ{] jf(>84f{n:D~c2 4m͚`•z3<dT!"su%U?-P*wKQd?m@SY<p_ֿmڳY+}k.pk#YK2C=J*! qhih#xnKk<` Tb$Q6XKJc 4u ݞN+{:EXkw#+0y%WI_Rں:<s;i#]vBQsʙ<7=FG5uVZoF/ڨ1h_I1e\F8"1bbH!iBKaҍDZx0kUIhHL΍c0Xm߳KBUڦՊ踗w9-/-1 '㳇1A4*%إ*PZ10׹90]Z.RU K:DDH|1Y 0<0hU: [K@$AĦ֮U`2%vh0iX{)) nȝb΂wDk{k5rVXϳޮ5BxfIA(ghhP嵕%&Irn^7pEgK38˛K!hb\ * nr*rQ[&bF!GQxd/-!j i!F0"-.bg{Qej_կ%tWܷObvÛ?|;eT8X<\~hѕKuƒ>Ա^fiysș6WJ\SQ*g~'C7Oۯw=11д_>mʷOBg^`֧飷ѿ_f&{Hܤ>=M}x6eM(cɷRqoߛ"Q?1x ~2'Hf5-C{E%u\ŌRϥfV=_ ~1_y<<]jyÛ-EK#,: ,?mKN q2*ΌM}iom,6tNȇ.Xzm^{àьAgֲ!xYiE׊OYE40= ;,w?A?%q=~zݨLmPnT adeRr"=G~4v+memW2\Hh)]&uղ,g5ay½Jlh\>Vʧ_KYUn}S`4мnV_L{:BgJ_({1_dg;n̾ O~yg8o[zso`֒L{ \mdM0 b.d9S& en)~(Z殺nv[rYhǔHPEi 2FBčJwKaO|)[-,BjC%u;x0A95LZ5MQkuO@iڦvsnql?QU6?a?>sgSsGCs&ȕ@(G>aZ!YQwkAQiA[1<9$ FYk} KNzg@B?Wr]qβrC+~Bn![ϲW>:TL%T 뾛hSvsrwswp7WkĴ DRW@|*QIEWO芯nͥ|t8pu3r9}\R<\݌J vpuW)|Ơsb?;Rr^. w94|[0k.f_{/q":Ki3< ] ψm ̞}5\mLc-Ӊ\6=iZu0 ah J 1pUhS*QP)0CMҮbsD#刭;\%*9• IhWRFf\o%[W,Th苝eah);E9ӂQ{1b1#jxTjGXC?0/Z? ^;&Dɇ:9߿;203usk*'^BRQ頜kiT6u5z(uݤ1h߭WF,pʹH@̱6RHCN\<QYw>R'X8NӨ<+( v+3 \zfa’/lC D$PKmP[u:zo`\[5D1a D h{+G땙*mRB젨 !F-HT=qT!MĒ{f9!EV=ui.. 
O?,|r9\/g)__?J2zuџٕ~p8k>=Wrmڛ&'7F#}"^F %Da'ϻq|R.f3첆Sn]K+ܞ?4oԟ"YG[+q=h0j`|ty2ZjE'YեnO|1?aX0.w%\gG?͛·G0xҕ|,덢mSS-R_fgՍ瑨 D '-t|}1O"ah}eA?\w Wf-?oi٦̦,S}3eEX!;$Z,{{f><ܓ`g*в&'] r_B~ೋw׮SkPslҲNB] ]'rZz"ځR?ءW8Y( IL'4zhs0m!DL!P"C }%<[dys{}#i.\>-9h:>}x Z;i !g&F6АHMHRۈc`![pҔ1x C0{_b`RXr)eՁ$[p3q[8٬ \{ )_;a#6$Jhk7-C6*2+ RIe!a|K-۷T|;woi9hi`Sdۨlv ʴ S,L F عB0 -V|'vWۋճ&8y<26`UG|D (s:ìtQBkDu/_QiS}՞Zno|']4 S g<)M8)Vcdq)av;exx3 )H IY$c+(@DQzDLEr ٍm3q[}Vȵ#r@4Sщ @Y1@'lsy0CJ#}PS2 JǀwDeTL8P`%k[U 2@OH]Q 52ĜI%MPӞ2t)@фF;]ݖl6L{|E¿]i><UF_>hĪys''g_&=ZU(mz5v66}lykYbHG  Omݿ<[#ŵJo…kPϲV<z)n~b@g;^'t B9-{C»جӆ =Y4! F VFkėR@@ "H4yh*z%$9%9{h OpWGcoMoB<\$T@IbJ Ҳ\P! ڌ҃)2G]`O3w0z:6<^8ǫ >utP즻s'o?!w]L <G:Šd S.Fi!H^rćH#Ȉ&G(cd)0% &Q]Lrh,l6C#cќK)R1R#ԅd(1xEefǿ_Bdv-iEh_3w/Ԧփ1Uyϊ 잮]}ƓI%&Nty@h?=ge1o*!_[ݢuCr{G;Zc-W=oܭ5ͻdK?@#ۯ[|O,k{m+wtwg|ֶMzkdT*B!Gv >Rs:KOX ",C:npuGdNd`KY3F/Wʱ^J D`QZ55g]У׳ɝX6XYx}9)7 z:^S=o|Ʊ:}tP07wTuGڐ\0:zk\Fc%CZ%!Ych+T `,w,xwR mY@I$S1bI 'xgW M,t~^HV,ꎭ s^Ió!˒^7fhgsvDV%v(jt'l0x )m Nʔr0&KD}9SL[KIC6F:蔆qTĝ*m:޿ܺ{AeєQ+ +)IVJ&"Z}Akpql`mvDXˋPh~c1f*rWs- *J[0Q$,|M!X H.jZ<ڧUJڧ0^u:ReL1gAvd1x ]`Mfi9%$]jv޵H9*. vk%RJle A#JDB/|fGſ=Y)A*hɛ(eƞh3q{'/aٰCxJ+Jˊ*h rZ/rqpșRۇ#qC*Hnr'ԝ8H.-@'gx]QYuG.!*O^A0Hz ]gN/+my\.øh `m|D_GnKR;x"Bbj|z7Jٴ>yA؁mĎ:)p)x)G3GCfO&gCΥLJeJV86 !YBA&_IOHiM> k#xHB9# x8@F(!x8wVEO'{L@ baV+ O^y-+>(bPT -YKmclLڠ2ɐTJo tD  6ٲ;ccyYXhJY¾ d)[V%-#S,ljidt!rKR[YH[f_|a]76WFĂ1I,!IrTXSJ`U$]5v5Wnul,E卶A3@a@'DE`K0Rll"J= W̽?mՍAvm? +kgH?ckFe/Xd%:u /k|ZK3ڙQbg%[#ͣeMG-ے5!Y<"YU~2${/ÄRZ (=gFC9|̔25@M#I[֤d_^Ėy+Z`Q $2kQ|hLu"D4(|ϋGٖtˇe>#P2놤HtUϑ[:mu8Yp9FQz\N?&N?.f2t5USm#LCEsLP\Qur#䘔I{lأ`w:PnzWBu=(ttN:%<0F1e2)V WU!O%6efwvthrԻ9csL)Z|/+$_|lPnC5 TtN.1rD̖{.pfV:ɸ *zmLqq>r zþ[X2mmܧ&0Vj9侌E-Ɍ\1udb(r~ZDq2Gj ir z`PB2$&0~a`$ 8<ߒMIOtv{CzIgg2 j>at^%Kc ]0ڨ[H(m%9 J!Hevz8vj݃uc%WrY8\*+%BzANIh%Nljnz#e_4@]K 1DIH M\:]rSNH}F$daV()BA+@*GȄ'n*Rn vO(4]P"k f5_u:UzY:S┚W8]>5=ݸ@bzz¢s6on'?|5Z_aDrMԠ^fC0|7,OY:dg珑bUejUlIhGpO׳rlZKjsɋ6hYDUQce+k^:gS\#  Օ,b:ɨKD+QfLcD @? G2p5\֥Ȳ4lk3Y!)4ok {g޲rYÞtoū)Iaϻ_WTSj߳o'`= S>ZUʉ1ϛ?#??0veYw}yE@r7n&SS x G㘤O%$Wlߟ0vvfӦQӌEljBVJqVk,g⛹ENvAP$|rH[o]m,q(][%|"avz ~l>~YZ ͯT{/ Xtq~gf7Ѵ!3*`KK  WNO$Odl!{brjjT&N$*|x݂yPuޘ)BgryrΉؼGjG cII)ڄ\^Q` \8`Rem,ɍtCA$2x.dtGt((NYYJ䶃,g ؆`3`WHklc{q>ylnGcy%?=G LӘH!35 b^ݼ.(gh^[܊ѕ-&*pQt Zi ɡ?]]O(=dvOyZşvC w+j߮0hMK-;CWkl DWPNW#] j]-o n/E՝ ^tKxEJgR11_F_d`PV<87kO_oTRq^}0u˰;!i.{RGi`2-“3A+#=b(m>~P_T1&!5PkvMtFi I?FyEe]i⼔1W V)O|3~^_o ?_ZFU/dNXraW 0W\$0dY+Ev*B2")Koһ%,%VxfV !|Ϊʺ+άu9Y0Mr6j {+,VXcrtN|H'oYfٝ[$6:PJBA~%4_ZxK #t0kg*h;]t :^*!" lg>sAıUAizzt%Bw `c cGo 7#])Q u0 ]3b Z} Ԭ׮#]i!WNUlc \+tUʣR!]aP`K*p ] JkWϑ,݇ҕEmw|$.ά]Z%ر4k+l=]u]/OJOOW;_A]v+nh| :r=] [F{L.]1gRIv„6%âҵCKx!4t+DWhE}4]PJ3i.]+.q*p誠骠TgHWB CtU ]3U赫TOWϐP*CtE9Κ]+XW誠UptUP#]I,UlygJԱUA-gAW?dT/R*RZfԘh\-$jxoA(RX4W-g&r̘kYoVC0v}Aj:O֓ i%R(j[GFpu,kOJ[lӼ*ᭂ7%57FZZn>n%!L=hj3p(.zjچF9a1nJfUS٬6QQqADByܞh 8a?Iϵc?O4̙\ݣ?!OYRo?{q4'&'PFD'!r[T^(JfeN ]2N[9b5=l5$r.}]:xJ˜p\tg1 YEd 7j歓GmBǤ@3(!z*D-#$X^5! s%yygA7FYT.[M ^{?Y#Y$`[Qk:2=HL1PRhіCUhuJ.%PnlJzI3, &Thu#Jj#ZjZ# ͈TM!z.Β0g2HII %Ʋ4&I態i YzBT3uc\I3YemΈFe$+7fח.UfF)9(D֓{bIT{A%A?'Z.C2h|Fţ .e-%ϱDSIpYְ\g@xiKִɇԺΪdHv$Йdepi&~NFp!90 %Rrme`XM 95Hbzl2Q-,'P ZAdQ;9L%(. mF#9A׊)7ePڲ sB8͎% d5C$5}O5)3(/blvU]ΩS`^F| DcC:WȔclDmaxLs v( :^-5 X q9YBy&nDUPd _&ܺslub.ˮMkv|d; PA&'D="jYSϻ uئ:HK/(yi80J#s Nz- ] &W$ZʍUz| AMj F5:/Q%(|"H&jZV"!@T. U U:?lq@) /. 
D,/x:7VTlh >X:;**@DUĝfE <-`9%^V0N+;: ں!""+pmpe,{%!z=.-Ũ<$ xҗU FYuDh e@7 Z%tMކFL3uz5|< hwX@zT̨ ƈZNmJ'0%6)6.63jAqX(&RPHƀ)E+YnT\pYT2N$Jhƚv7tXfHGYtgi&0 ֨H `f)4ͫA*U9u{YAދ,:h,WH6 "kӤU ak/t.Yu0qXq<[.`tZMYxf]v5].LnGge -骅 #I`d*mtnLvMKQbj]Э!58EMGlns mA*BwPzTaH>Jk$=|P@z`}VFu&id fV"bʅs <@bB|;?$ ^1 `\ة !-JFJQ5 =\ATXd t.1#dnІE8zV"5R&Dndj7kԪT gF (I<(0SLGjJҸh ҵ5Ώ;jyѦaL śZ#AKvG^up$k 6PX?ǰpHJfP z̀zQreߟF`J$nÈ9>X2Q@uQ`mJ]1J MK V;ͨ>rI"WŪR盉0K1:Fff%d;5"0"pŒct\NPklץ5];g7ۥ^m!Zf)`6Z΢kS[w.q0$oށ~rZ a;$3ӵټAFBe7s/vzmm>n1zvEJK`vv Y[k'&\覹`@ͯz\lbƝ^竳S. ?4{{[VX٪i=[MYN]o&o5gK7m#m{廲 )طuk[~*zh sPqێ*WPUhyL:k,'T\9* #%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R*Gu5 xN\l9ڧ&@Q 璔@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ@Q\#F '@Q d^@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H l@#R`'(zs(J>u%PRY*#%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R(>^ZnWh+~x9oKM[eqvA_ݩڬ|hv$\RB2u<%`O^+%s.}0JǕ[v<'5s:tկmvO=]5+)]=t;tew4骙+fO=]5+t ӕ9~Dk++;tk><]5+tӕr.?6=bޜk'ج_-bXng].f/ߤh˼ʱ3)\:JNǢ ?O(M6MgVӾ{Lmb>Wz}69_d-OW vz N(~'?}mz6Y-i=n[|Ŷd1eYCmռ~f5( H~Gþ~XN؜!aIo/N&]ze5p ^l]θkg&dw_N^G'Nxpy[IR>;|){1~mY<|C跎{ahQ ;eSx?*J9!c$85^px{;?/~_vA0j_!].n*]Oi"/i5&z6cr[`{ncLfɢEPMW&6aǾrzA|p~,2fqJqN75B1NG+Vޒ yO@bNXaO>9y'JxrnOݬILN;LN{yKpRBuly ZxɧM_c}e/x8k 5'J^s#ً7rȃnBqVO|Q2_4J+>J>[S(EtJF%Dŷ}кg\?J+8{*n[$T^ހ{0v_}4v^]cѬM}CźK$<ݵ;@yqCޒבzmx`k0jùܣo&?_l=ucpwe4^6"Px&=w|>z~%}e9` 8p/P-|8 qǚ7O ѥ\.gyH?+{|G8=2W@N!p>fj?ޥCn1RjjkF65yx7t.>5 ,u~GYerŕ0nF}xhmd?MTzM(MW[oPϴK,|5S#]ʗf?5hN[+fEnw͊[9Fu7~[ᏺ3.$65wrfeOw> p|J_9[Ib3I2]h󇢝q▏-%{R-c-eRGkknVe*/kȸ4n::U9}qpyL IqRP!) (y\eK`z@_54%Wjf#Қi"6 @ƕA`[>s۲[ٖF}ٝ<5hǼ~m.V[|@Ù> nA9' <+E49!XˀjMc$<&`ґ-*ySKH$ d"P&_qV6(#*Gs8vY#gPW[cEu@taJ4&^Z=;F;hu1ҢE[+f\7m`=nlG]5jk̛8q.\Ni :$kPDֹQM8-M`~'b8頋I̷Ґ7q]_K+<9 D )Ayp,aQ_5@a%Ɨ>O3;tz)GJGU6@q~y3zڟIjb~410s}]wAQkM{ls_1,l='ڍ&uv Pj?{r ۄÄ1 %yE6u}¦Nm5?QbcɹN;2LJ%!&a@)UD-tkP4 ^h" by:F+ (oMFA7 ebH J(ܔCLjqVPXm :ST>("J/sOy\4w~M79x{=Z9ھ1d.qq'jvGž!r"A._d뤼z8WW :& }&n2 : ;\>zZϗK8w=k-3x鸭Qze׾:ca_zon8.ۙ .<G\wk۰H[ RUbW/w\[qZMP+NKAGEU~6<Gm"}ӭ?* ٬eLގzknW RX`,LS瑚DDJoUԌ&*Z)/[!A&'uD"B c 6*')_o oFgz4JHxmחQ2#_r^y\&yIw.]wvP^n(/g#@ &--I."?jjY 9 8ђ0OܣKU^L9{KrIy'EՒpUemʙ%% ȑj;02, :FZ"'rV嬑ߏh2 w9s!"!Jڐ4.!) 
xa!Nbd&1*)D||.*#pe"^yˀ`R;N-Lz29tZŠ_;iV{SɉDSb]R9 (\,pᴖ?4- 4*].}`i+#]b#7p"|X§ _R2Q H JF29ɁA3%DLa,xyrc+?cbr[ЅCz;w&udv:րFI$I08< MW&:~ӳ$0Vy+b:;iIbR^ o8jU/~~G>_>bsA -I2 B5T&Z"˖:VnzsC^; owWgB0F֢p45?)0E..{+5Nwڇ|ꫭO|FE18aOVGw᢯XQ-X%Oh!6nM?j%4xFqsLxMlЧ5 _~웿HyTB@"Qa#~v{\Q9yR} ![7qyքmS֮Z)IkmubvaɞWt_N%Og^vURB[x\YRm55"a[; -"eCܐqy4lKRB{^EC>׮w2:F^˜C rB)e"ur)\ІME $:'-R 9^Lgh${޸D@z^V.ơQ.Vy7o|z'|sսݾ_OI{}[+|v"bE>\BmyY§qd۫-Cm/'jZ[F7ΥtTZ[$)֑BS; B,Tz.*IOg(A +HTkzDLQS Rpc) 88_6L*|oOr2z vQ+ IS dgr*3{>LC>/00W,n^^^(34ސ@ ] KLRQޏHyf˦(327ﵰVI| Qy%Qy\$C%5% ZT6' xbNzcpe% M\ R0$F:߯Ï/K\}>5{La %Rg$ W'Z%NJGGe#^K]6R< 2FiYm\D"F!lpAP:9&þAIR.)J"PK $z_cQ>TPCD J-#W &|CBݩ>ڹ)u J;OuRs\L'D\DL)g1'TYg^:.*o-W c?F$c !'rT έ_S'#MUyqJ^I˺\ZK"{?&ulQ80ǩen4YVmCB38*ej "Y{[)巋G}*k3;Z8_ioe1{G$PEW)őMGwdl9ID;&|S)wͧP]c:4n,;8qrrXvvvci!!En1iV *-kwhc3T"ԝ_}]slKߗݙeT|tr E]T.+F-I…P_L] fbh>/q>t;E}/;q.F&%!BcT^FHS^h5dr)8$灏QRGק|хى C>qs:e=efiDv[r`,2ODރ±eokdne?2E,4i >MLYT1bXLrNtJ& oetLB-+7Y+G''CX0h9!9L4}J\&R I3*HkT9y$r.Mh"4F ֝dLN3zJqCM*8)Tj5Մd[ dZkA:3ҍ'^>vJ0Dr4FL bVKQk)@k/e.q";6)DPUhR%UIdL  1%H_6Dj!'ph&yMV.'ZkbpL CÛOWh^'\_we&j.L32AdKM`?EC |=p5x/#G2,-PB(*٬ƓOKA9k?% ȝ ICaJ$eG.7r'o)ox7KCΣS˧yCdypSo/Oہ3˓AzԻE@xb+ixGyQk3j^ɝ6]{x;?<~b0GvXzr*{o# ~77L@!V\ t2re+/8 o_zo[Am[gT/s/i"WY06&gK%И}e˓t|_Go^Ǘހto^ߋ7~8;pYg%If^K[ץzחVqڭᏸ>_NnGܙ=N7R柎g++rԼ1g$_d4?]q<UNƎ|  /XM.d'5^tU91Tdey͞wͪ1mg7i;ZI_3f]8B}'S$S ; "v1DىJGDD?>xN\%ssV,(2]*giKJ&bXeM1 z5^IFHʇE:r-_Ȧ6DιTeGvʞ|8BC*y&=nF#a_}j~k."U!8>;2}!ùI0"6s87նùYUC?U.ޛùѐ9s[\|:?JR}m"h;ueJFQdzw9̐wlX|猛3nz=4U-L\)t>)AZgV%'}* u{PA :\1N>+TR|P瘄3Z={yĶ֛ĮT,JNjoSog` OlČkoLŏ.N:2įʟͺu" w1$ߡϹ@վ*We˚nzö^H;u#-۰^3YaZR.kX8-YY"Z%TV۫J"YкJ'\tITDP1=Ѫf5(Mkorl d'$ykD9{>cD>%{LIɗ & et;$Tt:K%3zA`j*L^04 TU r 4I,9y9CoVDM'jy{1_$#\txu? %qQF]e7QSW,Z?뾇ziL^΃q_yWоy^\r>&J팃`L+"+1g4eͱ(Żoq\c8I)`6We%Z1ؒ hJb؛GTV鏅}MY{fYU(]~`~I=5Oty2[}oٟ'}fb'AIe62 VNYLR)C7Uj/V9C6gѳ-z^RMel2 f+}|;| z2Jɺubdzi^r1BFdžԖ_jK1R{`bS2y4*V |X_)9;@QD>\!EC.|A!"ȗ,%CcSQUgHz!|swJ_x(Dm|lJD虈0q$ÅH*h" `U~.Wgh='8f|QƂeeb &DKhU@&KL}M׉B]jr )(ٔ3őFGg hޑcD*2ё&ۢ)PU!YYő`}My{[@. ft|)փ_rytG$ُ~V m.dF5:les!ᆾU.W!7Bc!\` \Z%+V~ҟvprhW֋UJ5jqT vpr]3b WR.#W)!\`/\kWV:XW #Ǜv˝㷞\v:\R#6zP–K˙ˇ(m^O._ fJ|Ѣd'q"VGBMWП�~rqvxkyBs݁섟HNx뭵ӛY+lCf4˵`z:IrbJ/ECbN6+A XC8jqZ5+M3"FVpj5WҌN>~7+fA-;=ĕ\!\`'۱X.Vpj:XW#WVhtChǺb([v"0jq@(R v\\L.Y~ʃ4+UĮX-UW#0>-[Q_zrWS2t]5pF\mF_C"NB3bZ0t\J?j/q4fCjp$ 署N_hF>\*}2 >`2i>8m~':ٴ|Q|Nfivx݌5Nkv''k\\ Xj߽'5,]ni>rg촛Q3!G_eZ1/{;HcjyOO5zsUqk]|xoၿw>[ )Ʈ";T"w%.^bOdntcr۝>+-,tܡεs} BkI؝ }7kAa:zÚ^^@$PsvsBQMCvp\%[ -Z|$U1%-;X.4 e WRW{+Z9H0jlW,VpEjCکq#WZZZX*Vpj+V9:{+r-Ő` XlWVU # Vgb;+Vq*vzڈgSָpEjǺbp\ Q WRW{+ oW,fpriW`vVTStUz lvm]'w׵R:pJ90gЯ+?jӮep$mG@FKԫtn4 `mӬ Ӭ҉{ix$Z7+kUQT:qRZJm VJ4+\Z++V=joW$jW,׊Vpj:HĕylW$؛vprm3֕^7t\ʥ=#WH\\̊(u8t\J#숫=ĕ5,e;+,uz"JW{+gH0vB,רVpjCҏf+o9_k VEZUD5[Yz-U`vWr=r'Q)W\S-&jMzX+-{G\mZJ\6?Mu8rA*V1 \+cfj<԰_4.Q Y'Hkk)mi(U֚sV,mF\;Eio\Z'ۇt<,D.*^jifbЫo+YC2@G0>y bZŪ`@p̹ɏߖ϶uM+^gsO׊7Geu"`εGitV%?Eƞ-FS0pBs~Ȉ5u$A:pΈ^,TG;i]~K싦"UWդFDgZg -)a֊3Ɣ53S^9$=y*B& 5V4ީ3\"m,1+&&X/e@t)tub6z(ҎϿ[5 R5jC.y|FTj@HS–TGaCOg{6%x8KۛvY.fz>%AMB֫^^e1i S%ƱR}ڹLx 0! /', " CNevB[",dt>\pQJPho 6Dg5Vu,+s $).), x`3}@GKb=2YRΠ%=$&5tZ]&si<]r$"T[w9$-F+u"urnCF:.*1J{Y]* Hj35A3yG٦=CԳE gwa86=zsx<2O\%Kn?W`yRWfxݞq/i :쫻l7[{:oS;OMp"ֶz|Ԭh v8s+#zі\PnSFG e ft_Y2ӊ%Xp]тKooo@B-s?mMpr;͟iX3*XygH.La5)<&?9aHiك%"2q-8|lqYH PXeND*lWAelqD#@F"Vс_ wWyl#MSGf6ۅV!0!SB&l[;lsDk g4M wbGaSM-HܠL 1 Zɜ ! 
Q[A7ͳ1/”E+2QIT`gdASɘ-לBmN;{k&XWIHޟq{,8,QsꩺӕPԷ y_^.h&bo)w6 ̝ia Kދ0zpFeÂ: adl~FJ.YkëzF Z-aqUNdNƀŨ61ۘL[%j\q7c QϹy ,vWIhHsG3mO-kyv9o^3Vdu6ҭ&5.ϼtmio0`_iqwEs[}St>X\x ̶cYe0vl_s7~3bqD1%nΖffDmk3p4n`ŲGë@6ٞi{겓ZW2Йn׺q)#aMZ:7ν6ƭsU*S![[``rARGo{狧o<.ܛg͋ҊE&{!w'೦7mMWMS{˦Shz/;ݹ׋E)]uyG]aM1;H% )_~x:烺z6a(IGЃ#D6蝴OQt quqmGv&Ҝ^Կ+<ܷ-hN6@i~6RRlQa٨4J!:cHBH( *SY\@zNŝ KW51:@b&ngtPz}JUJ faM+nF){\"FK5,::#f[S-t4^ b vvioǫ8s2+.Օfc÷_n;4cg1ng\+m"xRLrF)VmLezo&*jv&x`V*:WL,a1GOWdhs̻G>}t#:t{~PP+FTrh pQ9$##OBSAE ZnO9PE<'Л̣s&` 1쌜B/ԇ!UbW|(˅qz[օ=KK!vD >]vw&9G«l.^=~UZU M.%qbHRyj"z} j;bZAiiA[T5*8EȚEm$cSqv^@Zgᘋ{"**.#J!x29/(L3rnlWeHD߾D'{L6 0ip4<(%)kdw&d$~eSAEŬe 7$ kQ5/Aԡ$$<,h-scA팜6^g8`C8'WǒQQv?l{􈌝&{G2Ooxr驝U.nz+Rd:7璗cR8 :\+Y=1"FL,Jd$Y&q;2CţwvCݱ><0Tj玄̫zyS> ~7ЦQ4yH;e?>Rֹ~\%ΦTWr&T`-(S1+U93e@10^xbUb)PHPd3؂`:rT@ dZ_:\1qv+uq[V9kL].ۅvD_\ ި.u^QY Kufi,v}&UFƳEpfC_)<Qdy ZKBj-!ނqF98yiFF0@pl9M{HEFYJ&B2K%=L.2BC'of5o7sh&afApX#mb>̉sH2drK, 9'K)ɐ93p͸OL)jz <93D\h!&sԐ6>RXr ;{]^[Ui/=O{T2^DX$!E+mڜxxU$Oةn&Bisw"iҲqL~~zm`tH)J&I#<:r}FLhAEH;X@:6 £ Bם`ؤQ ,wEm3:oR[ '@2 Zœ?wH3fnUMn,q{5ODxn_7o'efBYk; 6Q%r^/T<)<, zyi#MG" הkaupL+0 O)`l2vm|gX}MuC[JjxbiUmt}m rgnΕ!Oa-zhwd+ڽ,]|rq]0oGeg/ ϷrbV-O~B"_|mՒյ6͊V_Ͽb_᳍ʎC|u5P9;cO_T0jiKWs(W67,HZ_ogpA֋28;E].7t6Hn PORU^z/ڷ_ D~kW84y7(t@K%h'U%gc87šơ?zԹ[:fEhMM[$jMͣ;kbَ:<^Kbz~ i_mGS Am{GUeX&짼myn-^$Ȧvˋ}¦;.Jǰsߕ# +2LJ%;9@:%4PJ$Q Z#QҀ#xVd,˃Yك6ZW#?}>>_ @ R&ѠjM9$Qy6Gi63Ee<}z_Y,<4A}Ӣi6 /Θ :d@HȴOKоwd.hD^B4ˡ/< Cb>wb$mwvj{9?kcWt v+*h#^s jD!P\:Kg%Dh`oHN;kd4Jc,ZD,ZN£]d%NR^rJ<585Fx:S^o {MHmW X@ GKfv~栀2ZӾ*hupoH,r,AB )'j,Ny "@(2 Aiihi@/r_)D;M asD Ds4-rk1YwgO˱M}|伋 Lvv;--f˯ֱ u wJ-9kG 쎪Yx'.vM[E[[6ݻ?>ewҋ, ٛ_xjo/j^.Gy!-9lc-݃~K=wT.բGv$hvvꛇˢ7oDYvh :6ds +n&Ջj]Xj-&'*sȑflֲLoLu/ $eѡY1瑚DDJoUԌ&*Z)Z!A&'uD!B c 6*')_z8茡c?^p~9O3S8ǫ7jx4ٻ6y-49]j˛+ʓ⪢4IK ;#rR0ۍZZH>EuBZВ0O܃\L=Z[oOғȩ IN:7(% &$h[T,I+ƙ4@TC^U%`騔!(gI :cTeX Y1qԳF>B3ɝ[j$ZIAV ,(d38F% T2[yw^&i핷 8 (D"t?М#:T`' vJy`'Tr"єXTđHTA HKa(FQ8@g1q۫?йAx7gF6%x&Ίɠ}&pDi-qhb+cӨt9܊v9u zv(WlWTa[ƙ'k[L`G'L>GY&y4qipTPbWgwND@ *tұS6^9~g"eޝ!,T1s7$>_ LvN;z&v$ASh(cb+PȦ-xӋQ dN,:90kfH +O?8`2fէYu9nv8V_81ٻn}Y 3,N S܏4k~K\ݴ*r59(4b1Eէ8X.OClNji]~m60|w@eoJ rm-'@N|3%kS$X4*;sLPRKmL[t֩tTD4^pjϐ;Fg^ )4/-,r+߾ZhE޾~LxdVaǜGr&Oؿ?~VG%xB̀8Ydp,(Y3]lbooƫz%&x$x!88LM&`eS ˢWf9̋G ˌ]vV?,.5OmjQi{&o[ipNg =T]`lQ*,{}f}lX b:B\V\oxyw&pɣR (RfYZG OL3h <0LavEF ' %&M@"w&@)Q P)1Zfa{bb|?oIbC1Ln4Jy,SA$ѨGƫ?]q5DkǙ%%LOш00y=!  BOZOSm`B+d!2g1쉋M8͙^Dx4H+p܄ځa>|OK@< lsd3&fQS"E5Z Qޖ略5qpӥkw:Y͠"TS«%/߾hi55EYɔ!n7#h`: lXo`DL:Lg)`ag/2#)zWY\-W(-*KWGW@g},, t3xJPT W(01 WYZ WYJ:BT3!{W(0 \Bf ׮FpupݧvXU]ei,֮4"WY`#{W(.%/p[o\9v`!t͛ !S x -KIvw'%,]TVhrJHn~{m9ffqW4D TW• f\AQ޻0}1d2pܫ]]}W-)kUg W˜sYW[ń-SE3mfhiA{fiA{Oij翴FeiP<0?MJW6 *ZVBW,K q>g+ ړwJC W{ȍq\Amz\ #+0~bPԆr q3=m`7]|"w\0vQyVW`?DpapZ ɠuǕz*Zx+n+yF"ZV+Q\G9V֑iko\Mk4aO[iOS9\WOz2!<Z# {X+^h ew.!~4֍iQxJLi*W"q"(Ǖq8ˤ•j\}MRkiEv]x +uJi*vقTW"xJN)?9|\J\ %2In~\9o/Tj\ aMY~Vrڨg+Qɼq5 W"YAW[ZW1#wU +<}WPQ *rw+~ 0I}IrcT&jf\񂫧=-BG:segmwOh7LSx?rOvi =Gٓ}JzC~s&Q3)|YQN^;&i\j;S2{t]\$uCݨ8@zubnshaZsqRpR='3Aw=޼MK߼Z7? 
q1&SХsg=Ћ_D GDhD6㈸{ h=蛦 5aMxwqL~ATY8&<:.Gxwq71k>p5:6_M> #.)?/xoU07zyg#Wg11Li11w yxݱM7޻Jp_;v^ϥ N4pw78*bT6GpZ7?..n6=9yz1|GߧG{vOVh L.Zͭ~w-CJ5+oa2^zbT)8$^95aS| q ǝWH WbcͽO1:@pV NàC뭥ek Bͮ_xzݹnׯ%mJA6_5 +X @ӷD\'Zp RFNנ3"3vS縔.4:[)F ok٨`ǻ-|3AECAht`] Rp #l1@;m"}nk.`M6M?w?dd": !(~gݔ!Iȅc ,XFPdFNja1oTWi|y>6FB_F3u/T0T%w1Z}SJPԁ(!1zKJ>a=wj~+Ӏf o J]0 HK8ec)Dټ" BڠJKQ U]V.X57G: -Bmd OxWGq%5];kx T_RFlvq+d05iAQat}v/[<y-$[I"/ˡCWH&3ji}چ^ ulM>ŚPuPs5 L%fa2xg]b*3Չ$)pMQ>˗I0ky$[A*h?Z %i6\r`( i,{bD>Ę2Y"2U)^V`@#Ɂ4:2]O'9C[*1s{f=Ο~FOwyFDC GbyzWo~8=ثG09$t,> ?w?:B\onp\i(3Bu˿"`J G3{=|xuf+D :1"LQo..UYPi-,3hWH"E7`t(\|ՔD EnaRMCJ6TP67D̙"*3;D=X$I%3mFE!UB8m~f[)1KsVbiiI_KlJv$d(( POdIxa'I!h *)B7#!0VchlW >R(%彧w$B^ P'tTL/P{]Q>J9퍃way;ZH[6Ϲ+.iJ*&OA]R cΞ]o b[E(AۉX` -OUl-UJkBکia#>(#œvV ƻ$lPZp]IGWgK%-F<+WĿqcwuJ é(u<  v2m- [̣Ryd87$\PDҐJpM"kal섔hN եh\_%_W?uY]EYqSV m&`\n10&Jt )0ӷՍbb YqQ=#Fwah[Ν>V:L=E:0!s_҂nZ*VpH(p3fh QgyBi#8 FF؝$ ^ aT%"P sk)e4ZA @2yp>p*vVۙ3\vph~pSs kF9#:k9L~y4g;a\<çMx*I&8q}}'B=W=WJwM My!kd{.>R7#lp4Ek=nK:ׯA#oap`x6 p\l܄764/|J-I8߆}a?BXZ2%3؜v-WSH:݊}|-|T -`*wCH5}=%p]mxdV4JfãqI$A?ogfs@\->,@y}{ܥJ,@R+&4ϯ>gEn15`LEXPFldb1a4^P&ŔۉV'rGc-~' XDZ(f0֐aHe,!FE0BmvtP 5ܮ?S6Y nkC_ޒ& QMJ,6c;$B`́9XatpLdMpz7^R!hb VDe K$x7loRnTjǯG%*fA}yPns6oPph/j/b6p=n{w2mx4\kv\ӵr@^X _ Erc1ZyZ G 25:BRUYn.bKeRu7ؼ{72"2:#RQ%_u Đ2jmҵ{_ f^e˴+NZL]ȿ8v ʅOpVg/M7':z;6XJ0!^XBra._vg0$q) 0*YlGW .1V>۵O d4&ơSiQHL% 8(jcm@TsNnW&Sm)$P#Wj9WubVn5 ]i6gN:|Pw;M$ᶝuË :bDa|/ߛ0e&߳^ I3ٹc 9l7>2_3//ֻ-VZlY#Qqe# \X9RܐM3`;[9 D@?эH<] LH1tPȐ@j>cC)΀zwj& 142A8 UN, 6k_e+Cا'nȡNTH5/,]PƷsī}gyCCbds,p0bɒ@./fsF8t@T9 \m(-PMBEiDj:S(FppdQ`e" e("lĉJħTWFGoOŕ twgvG#SRrk?mjR(̷U' W7 &$i\^ %Lu{tBGc[:nc|wtuF.g|j>2""#"VV#JICJEhbLBZ9 BBjV%ҥ 2Ղo +lUe^X!,h*ELùABT!vByj08WZNJ%qa,#%IdLKT&!ʼn6:J"MF }i5·~Z/K(Q&M`4{(gKa:4:qFت(҂E".ߵgH&dFy*GDy4:h;팎v+*k&L3BSD,DbGEƐ' V_Qxb)Q)S4YDYISw::zH{|~_X,έ%˒Ӣk@7F)OqIHtBB$бMLDf)^wFԻrYl璒VRa4(I` XhLADXL҄+RI .Be\^LE(UlzmWZv_]HT>,,RT:}+b^>}LV)7\?FCnRǠxD@d.2Rfrf.H%%t ˹ɩ6 ŔtM,qd#2 XEi"pYSIY*^{.U ؐaTI˫Iesmb"b;Ybי$sKpO%--/Z780e80@+ YXA`'B3 *iJcI""I1e!YECF9rZ٬D`s[.,u&`}__XH̄[9^^^\ӭ;vTεǢՔZW+[fOL#l^euU5.#q6q=];ɨeD5"O%cuVa1!+]ֺtn!#\Оisml_|yH@%:~k]Ri4c]hBR9扝UyumK<%M,0Kl R+YkCIx7h􇃛n l8=:'ݎ9t}'d6)o 8*x2B" j W4F0V2l3PDE'լ\VRwcp].Nұqne>&*MPI\[5eo'&7ʓդЖV/Aƈw`=S-mZ xl:(qb67;%a^T"$֒!C N5PdqbJ 4gHeT)# M)uDp)(t,-;U ,55UEBW:~(ƌ0J >F'i\|'arݤ m{׎ʷ<@~]t;y<:> 8Q]B suY(o\$m=\ҽϰJu[GWC{S4A <9?)/ݗg3^d]ޏ)ƒi(hlJѸZriGoghfDt;h}&/m3 -+-OXĭ 2!&hJLę KkJ$0J蕧MIUt gNi tC!5W܂>Nm,~|6~f#n??qug?/.^/o.~~6|qꟍ:=_ahD맀Qsj@!XG뵪Fr$|׭4H.C ́fJw(nЃ2w~wFizcKHe`ǿNS? [Ǘ{?w:{ĝBǰQz2}e w޻._䟴џ.~zv&{ho:g'}ϤfzQ:$HKqC,"&DqYXf"!%7h<%2R Nïrqy~uqۗ?>{}>a5q6R_l3j=umP^un;w@xx5_isy8DvOr_({|gݰsFcO,$FG)"a qcr쬎^sMqwTȆb}t=])7w&դa^7EnL+ G{;WR/tj>zk{9f-HUlur&eۉ>^&`5^:U0eA뇅lv`ps٫WaPm_uzw.Q]l q~OYwq2;c Y8|L?&awd~i[ (&}[?Qv%d! g_ߌVI۷7yz ulf0bW F_G l|g5Axuzl2B?H乔I)3B^W^*>YKfwVV&3/675"yO?ºm !,\sGt%_c(-9P0U,s'^QʥPh_j_/޿/пQ,/2dxa!{Lbd_dI}f.^o/8|.5˭+Jg%qy6s&d]G2U}>I2v9mhoUlr =LO1UlfhRv;ݫ82Td\Y&ћ`Osev2 A! hl'h(C0A)FѭBL5r^VѬ&4ot3>VTYThyF 1=]TTgf4 NeU # n AHž?TmBqa)n\X&" Q Rn9ň7XҪpfv4GF0"7TQ9zц B/ڎ%ryu,SR>,}v)\&/"ny=P&1hcE. ʧer0NCpyuW+Gq9IܗQX+:KJ˖?"lR6xT} }bc 32>")KG"&@2Mu84RTH|2Tk/թ,u%JBɕeJ1@16ԙ^@J0b2c3z[w )a͢>ws3uAʀ:VF0qG4RƵFa|Vs6lz9}VQJ΢(EWq( bA;;|==ͣt%F x/\9Q X(1{  `c "JH"NԂY|#$6!8B #JS2VL 8`Q`ӡa)18hCk5VxVQkl)0qQ$un1E#'qiK9|0N>~ ܀KZwu֚ׄ/kM<ǒ/1c8 |b ouh25$qg$c郻@̨&S7ݾ ^U*.lu> >{7ePڱ:-# WGkLq!WɀV:Dz]V'cE K ֛vC_V@a"O"co| DfG#R̬ f`f2YMz]Zy@t]kӾ^#kD"}Ɩ52pIs!i^t\ ?bLeN/qm9}z}ޙٗwۧW).Z-пUݛLp_y&EF0O3F^"À SD %5Db>ݚBW1ZDK$H@"Aԏ!,tdC(}pܱOE N/Ϝ z8s>r=0vѸ' )X89Y>PfCih#}yh~xBM&_+as^"Q0)xL,"<|NPRk[%? 
K al<&'$lMR.iTcZ#(1ӻ n0dRW@W ͠0|5s":3Wq65h>{Q@*pѫ6jD%ZmOaAIk%}ay&F.)F䕪2F[ʝj?n…mt$ :SO)B>s73>A;%[Qr3\vȌd?R@Kik+I݁eovrcƑTj ԧh LTNׄpˀLp=qb;8SmE+"Sc9<kc8!#]NiE$)]Klt/V|<}?dqWh8|闁[Dwflgb_FyS[w;,ujKx0QcA#_6WfK*q V5Ф鼹գ__JE0@V <Lw5[@G[S>^ ],2±\n 9׭MsOYUp1Ro(nGMpU( 7(y ?.&M0Z@+JQXˍ !x~nMؘ .fߒ["Kh V$3)׮}y:Q)IAoo§WۓkŏM>BeBA$ec͏Īb n6b)JO2!d[qzE,zJud;虱 o\>@@ŁIT, 90)V̿ d>B32k*~EbK(- 2bre,B1ETx8T3,Ca4ӡb 1ڸZhiLj(CQJ 2g?ʲEn&$'cm.)O$s~'+Xwu B;4__#;+Nwۑ:>}1Cq@aWIK3J :8+*%:`rP@LJI񵕁>6<>7D؈ c0.$uP<Ծ͘} Nq̧xYad}vxܻ/GQ.ovX%=uѼl\.oZ?ߜ5?4~_FI,+quwƻ+c:{(ba;C/Y=l4ϯOgW7WMv=-{4 m0uB?!~~uZno4/W?v7 }/`)zOgJ#gީKbLS>_=~/<ĵ~l\Yh7?ν'@Ǭ}OSoڶ^ڨ]+SIӇƪo.xv{^ڑd7 &ⴱns8 /u,z0fuE$dF*xt-ͳӿw7ws|yV_㪟mGAaf8 J| \Ok}R!S8`:" SзԒ `G0Ѻ@>=10:l5_/cRZg Ηy`1,PZMfڣ@U\&{n<ῡv5`wN&F{ryǗ~jw{qG 4i%~~JVdBCs,]a igh_zu_dRN~mҚ{i Zu6[ ngtM:CK&xgx4*{UP+ zCe^83~z9q<eN;xe헣o4E6_yHCpTGq=x8$ilFo'uٙ3 //oڰcUݽՏmʄncDY#8l׽FcX&r@S:ID?n8A7 41Jnc 87>YcI?%e$%gЃ;I4)@\OFX-p߯o ƗxT]il~[yXOǎ|pi*6lgRCʷI?Ǘث32^nwu}>"nr|WEgƤ,v2yi^s\蓽Ou=tk06}Fl`(+Jp"Idd1|Cu`}ub$`XfulതELxbQw'e;Y\C5Azqtkg})UI:l&i֎xKŮg5oa(SK8OB:RsBϭg_v=Zzk8@)zvf.f:Ej4cQDe*"RRm|1LP|&mcdA(obJkl20&P/3sⲐh/}1RO8$ޕq$ٿ/YyD^p%k@/3 {Cnj$7`A2UlMіH2222*hLADIIru(7a(j^%7%vE2 ;[ɔS).gڐr$Ge .CXFe2O1ό yb4bȝup^zs_"JEDo7 4S:PbHtnEݗs<[@UWD9O#Z_yf!G \[pYe(4Ẑk%kŦ- Xmh˕)c4GD9]S iT2)bj͇p1h8fLUCMKhxJ[h;4% "p\; Qvf 2g#S5緬G4w7^f@q(l' ˑe N"UL5Epf'2}#I=98NSeƹ>}}!کj?k<}&GGm5_pqbjGQ\um0xT0/ s28yr9eE^ܻ'7l1xb1TrAX7s`N^\HJq-u^q uBk9 E$Z׬s0NoY vy=i+u;%Y.=7JƘiَN'SS ZK%\V{.?1R|?[ bP2i3˛^bn5eeHj2 L"sT8ԽOŐ އhG/[OWpEEN0)F.Nm #$ueLD܉%)J Jq0KZl7z Wg_ ?xOJAxO[^dDς~^>RgTm#܇v@F؞dt?ni_YZ8MfH!lS?U*ubK[W;38ҫ]o5h3,@)ZL0~rtY5SjJ/}N^WGwUt0g_AvGJћV(eoכG+ry_{@Kwzܿ:CO oj|PfwGKenG8)bz]UǛ^P<3bZ9^w }-WC]xe)ZR+g~;(ևx=bB>;:GR$ n>ut~Uuus!JcIOV]$}tcZ;ށvS7ǯ|.4}'?:Kb94ӒΜ)㇔EƫƮS9q@>O>O>O>x-7^ P:Q;$DT:S0^*5Qx-;je0667^ngoYZ t+C^*॔X^Q \(Yd)jIPf2S +NV .HlMl8 W7jaGpQ ;)p+u[(D ֕($H5\˒'όEplx0;+wR@TRqšx iJMP6G҉@嚣e3ZRh)&9'ҨmQ\ֶ2. û&ìb9q渁HAX4M4ZBRHc$JU8۴ؒqg$GXmd*Rb̋)r>t0(e^<=X,oQ4gp,>Veo,RYw USߡVM?KqWww t~T*gz? cDUFD. e#tcppQ~(1<*<0pn7os!]m:Ͷz45d|p3C6"h#! T7!xS qi4r kJQA)F88-iLj)5"888Rlipw{Zz*H^PgRy"j"C~j-ʧsKO5atRN@YL؅l^48O᦭?}J)Qt[!F$:݅4H|,Z=E:)ګP$sDqO;+mv WV9ׂ1ïʛSV弯 ޭ؁}`HvQ5)BEϱNGHBفT^}*K5?0Sgzv [_:gN}jFJIb3&/- U bE-WyZQiEu^_Q7#1g9Ѫk ?|,?Ζjl6~;sSTм>n'zƳ xJ@ $WU%"U\PE3! y -ə]p}j :؜ lN9dWjt9AId: <|w36[ h}[{[b|R@ rK'C^'z^" TC n]PH?.'77o12SJK"a86 sޖ a! lK%⯆ٝcl[MrM};2E&m8wݘP$lFR v3ޑ`AŐڒc>IB6XQ*X3i(+Τ,KOޚ;KݚW@m$j|ݺ;`͕HXA O3PXM6M{V9Kn)ZR˩C1̃{NSX |vVDg^UP \iI [u4hq>$ 3p Foz9j9"ANU(v'%IeS8lzVDu<|fA#oRavc/=[S=t}Z/-A'7 E!\QޛȟAX~E|0VQLu  5{4R, )d X੺gUS ;%ɛ_N ѡuxw; y? nӺ}p7i.߼4zG/o?{Wȍ C/q$[31;~CKQ'7ARRPD]R[T/Hd"3/oV;@ٟ'2`2x lqKw4#)gt8FxAIN5C\܆:8؇d 2#"=wɝ{(wf0^)u`)>ol?<ͳ=|dIp*k8 NX0=l frdoӽC :&R>Y/ڃ Х܍z+/@pcP sMVy\}it9W|j`A:7hۈwӽȕױd>dK#Z8ޗ,J ࢫD%쭽Ht{T3~~⌝y@+N[b0NYtK5)Ki%j:egQN$E1eN9u;e) zKR{S>`6 z#3%c+ծ=86#2dljwC֙,3b6o$Q pDt>xOte0igS0!VɠyITlOFGUuo,b8I_)n;Luf>U)jEsz| 4T}>HI:1etLKCUQݵzyksY\]nv8{~;35iE~B-Uk'd*4)KU8RO>$P "*Ȼˏ7׸A3*/ N2D H {`r1Wi7/VG8ڙ<`pڷHp{xbIX Bh8#VO_r\MpJSվwKOͤ^On]rlMw|lײַ\6nJoP=Hڣ&yOmb⛴v@UTf)ijc*+R9킴qz`|^)L^2uK" 1ZՋe%̀3f̐ˬȌAog&6Cs*\pGIdHCdFPu} nWf8݉㑇~ BCvJxϢz`EHZ/ҭ"(涡YG"c=1T%V=U0քOcmY(Y0RU2ys3Ckӱ=yB"hZJXIqN̠3U3'P7c?Oz,ELط*}I{2Y4h"avwq1$"IEȋ$u\/o.XΕd Q$ڔJWE4@ DM*n01Qݒ'rج_:ɪ/_ ErS.ߒL?3jcJ7O9*Qgy&M,D԰Q*A:Sz/*GtkurF}!cBKC(Ѡ`x&b'P`bwĎ"֩LF ulXMhrh˴AmLH 2+i"wR1&Ѳ#wZZ!1O &8Ӯ(EXx@R/w'uwuqB 끁v~dy#ٻ_~. 
vo4^pR)gv `F{0@\-/d0^=[АkjXƑjW:Yz⬞|v//fx>6g~O7ԯ'}2k-;?JfK]b[}XiҴ.?() }zywˏp[IM- `R?%ng6iԺ\ծz}-I.˔~5"@K{C sQ![ri~smZ'ϟOɲptBqޭ^8ώ'^ac[ȕ [irOghܴgkͧh~+Аs!=,'w w:A<p19Hw!(*!'ky`}\Q@}2B6zIY،KŜ V p+u[' "eH /dv)aUJŔbnnQf03L\|=w٘2T J x1e{\Z 4V2ȽQ3bԌR1*26̸ ވR'jI"*o=2Jᡊ$6bXP`2{VQ{+bBT>/ه%VP"O u3s}Hs(thPdT Ԛo'R אeLņ gc}/5LI5Yi 1#JDLEib<RV4 mLPd YY;G lA#&q-*9\pw=^Y8LSjdQFAhGo(#=8ڡ0֤Ik;wҭ= v=a5~!M5:~ܢS]a\pEZPo F *_JraShȨIx%+,ռ"jD( "i@EMK?^ Ղv$# <E9j"RUU(R&J\FFu0QQzEJ5㦭U{;{wbeU^/85)3ᇔv@ˮD5ҐyMJ!p9^@ɦ jTpR( vN\0 -qeP|-0F9ᪧI 3|$5XtI1+A]yN;`.H5cS5z=a 6Kr9%ёJAΨRQuxefTlLQuERI_}=)!(\GV@bKI|tU*]ZLzbvTtfG˼֚,vl^DE#w}*#%2Xǣ.e9 kXHE8.G=dH"k* 5Q^;+j A W!ҁBM|IgRgz@BDuBxb3L^W[SD tQW@҂(8ڈ@gɷXGAv*"ö Uh׆'#%: GsbBF3I[40/hcSвH o;6yV̀I·ZqBc`֥זF(TG@]V>/bE@Ѐw(3m$ElW1.ZTc*8-H:3^iێ}1nbekjL@%x\P-2(p`.\G u`HLK&yL b~o'YM s3k9IchB"8RNGhHԄz@B>"b~*o= 0fRAJ' DE+XDj#@pp$HIK /f,¦(`vWerˈ6U6U#*ޫF\s^)KYwL>t^#^<ϓ 7MG6~ Y4hMvs=㸳ɷW/oV ϓz~WdHhOHmHA?;\iQCE8]ex-Gt$k|߄~,a#{CLv ?|)ЀteIh: _ M,kR G7EHGc&}髻'RJSJCw> WZۼܐViLv_*>5/UrH-G#C%c4yY0͓fXHd12TI2Q}cd&0Urږ՟˧2=y!յ'ֵBnaa3P×nac.kiX‘lBV9 ji7%SL] qLzay| sYhq|8yۄ||e>su|•%jl̲|'88ӷOBP?{6Ҭ/KH:Il[oF\1ײIT|/l2Pbf򺍃`%%l a1A4,6ua9!IZҀE GVB>K'zq'Y'.$u*˸WDNFjmTQ-(Xr{AdDžytXMBhOjʼn@&P6xool!ۊjojݨVWnQ#hdK+6 0R?Фk`P3-MLJL!TgE6wė D;_(MIi7a׈V2]]L=5xO)u{څZWpšnߋ) Ѡ7lf 2 {۽v l<`x2SMQX1:;E!9/z9%q5lަ7 PO8n]wna?V-KE\WLN7ld~z̯%9-+&(;1 0_K~q6$QoBWuҗ@KoE6Oq쁷m<-x/d@ 6O Er)d3!/g=,4/o۲GIQQmkO"INHԆ7TjH bdQ( gw;E _qt} &@0S.b{uĽ.a!"+"= <}dhC)i&(Ϡڴ08Չ!VbJ T!URJcrk=tHIX-G^ܟŻ'yȹ+FwHB6jq5h8#SXkHTX = H@ Cc/" @):s?ٟ2㲐wWBU#D#C\8\NlEЄH}5lXdYb6LYNP*> \Eb1F &[(MAoU<%CDx [6 Y ' ShD(JmI)`˻m=ъNr.,OF1&q=0Eߊ;,撮s+Yu\?R5a%3؀ҡWZI0;ǵ̛<  Q;qyjܝp6Zс5{5JC0a d'|k_[59e ?ɼ4rFHjgD60+Li1g\J<<ɄEY,2 ˳KY:1}w'9Ba\w>+Hxzc2tdPŰMG-O>lOoAӭV1gc Iޏ G4>pҖK?{h~8Չ&yÝoVBhiɐ!]ԁn&hSe}b{T' l:cԿ}M? NFut)ߏK|tuGч$BmNۖm6jri&/mz(zt~.6~71!Htt:AO۾_ہ$0ݿs¦dN w?KX3~'SM2 OcJErRQ?i;QW{ ׏<GW磹/^5Y rO]~68yci~8zw<>8=uL\]ߎ)H_yX~^1ѧ?g|IhAߺnun9:=xձ )Z`qc=l߿oݶӿo4&@i:[6~$V] zFRV$y:)1}GN]mʖڻox\.6HYl$4~{plzov& o?G߻EߺOچO޷:gdIg5ş0BHgPcs%h9>H~ uTΒ^$$>/giIZ#zGn? s >' ,h 7Ц!S K߻@ {Y[{x牪' vDF8W˷Mo.gi"ZN*6Sw0 e|PW3y$'殀S/as㈃6k׃o#?4쌿%Ml썹 ,o+|3~k(&?Og.}e*^r-~("td07.34 n#q-B !H&zENGE5>)i[|H5"ġf ~4]|lE3?MJߎZa*/ߟ<ĬjN' Nɛq=’0Q:voW5"o{!ee˭OW -~<y*KSc #ꪐJ(}G|?DR ?l?FT3I^;EZMlIT€*|5Dj%^zz1{u*\@#%d^yNAuEa5Y%|"5bjXi ˉ 3sF  Cuڧ AWa@,sA^KUB)%R&,pLX WpR% C 4\ N4 q]XJ5=R Ē Vp>eN,/ F&s:^1claB_)auHKKY|E[ABp-S!=&3YKE|})D,s qU:5U5#-TuHg|L[ݚ^>4gz~pi*+OTl)ʶlBr7nE5T sxNty M(s1)'ۅT"e|K Uo*}(lұm&ք*-nP)l A) % &dN8KdEen 6)^Zp*0ݩIIi1!4sj3v̅q”Sž5sx.DNE1J2=G^s\ NC!JzU, RR>(°PFʂYȗM͵3wsZ/7{.>9Nx0i=5 LcBco/b$:ͤU3Ύs:rZ'o%$ m ppt &\j>=רA6"vPO>uAaW׍<8,d)n7eoZl0=0I6ՉޘKf/ecLCt})[L{0!EvjB% -yD셍Asf@f h氐9u,ua D)*$S⻒RX7+ABCLdTD(,1j~NE UudҨ:pIm/ 4BJnԕh! K$TQB:k!| $FN4Uߒ/k)PB|7FN3T+Eu$aNKV!磺)= >a<ٟye%a)yKi/@DLm8f+yֹİK?SjM"#$o7XKYn7%K{jL_B7/[p6q]*D=ԥ?KOw PLl(" Qڦpb+YJLA1&T ʄz~}/@9m|%SsF0(/{ѱ[E"8 $4! ﳗ1(sƘGxV_fm>#R&8{ȩ 0iePS")=%A0 wFVxM7T[mbøBߗO.{j Yo dȴڹ{%Z^,pTM7 _PB(񋅂 CG)e^_,ӪmW\h?{6ҵ }Z%+~`[ڭD Vȗ1d M;c;ɍ1CXJ|yfΜ9s텂 :-Ő1^o+[䱽ql~^=m'ZH p]hVFBR4tVT4ҧu/Z1R.ޓ Z >4*:7(΢T`wF4>Uް/U?s[ϖ9^X]f]*wYUQ]m]s}t҃bNcDxt+}&񹥵YFi.6 jeeo=s`}mٖi"3[-=!x&{C'I;?WZPnxف-gvsa~`]}̋m5ṃlՔuBk܈->ifqg1kblEB^5@bX.A]k Z5ʼn]8h_rCΌI{L󃷇?d<> [?hw]jT9gNkVDa ւ&FM]4b)e210DW۽~3{e;+_=(Ud %"D"@ g.d`8aΘ1ELN/;i^uנTM8 QYWby# Z jC4>T_c()+WDMO1壁M8s2(hm _ Y}@*ltDZؐ6_ЅjneȒ ќϡ9K$΅x D39 yZzPJ@ dYL@ S+mAv "Q@bB|Ga=ɋ>!WE>%7ġ B?N%g6 [-H8BND "AbR> H)Pa P~8#=B";;7}]\aDaD~3ƾTZʬP݈x}ua@H>蒈/%NN% %n*D@*r1}rRON䄚̼3@*NjAN*hp' c`'lica-P6dx–?-@LV=uE{1T W1".*ym h47s S$N4g2;2FͦBj^6!X3c_g^ۍ }F? ,cAt52-J Eloo%tRˮ+|&݈o(P&`2cUClĖbP  氭7PW}c=6T~""l0..O}|/.'{)xKFۘ5nK95!mZu8%/Mx m+WcɌw$R-Q>rqq}|tA-p N%`Ah*! 
n4,) 4bB1R1BJ!T ,5TAƵrȒ!j`&0J &DMdz"l^_F0R/~ y~4'f3'G4bBM*aPTv:X%H Ե@4iW$IqT29g5~9mX*TqB}VJ,UP ӺjdP*K%C0tZ" D9_FVPB' B}V,UP (nB-SQP*d ߬B%` 4 rJ,94rD@'s[? .tARn8Du[7Kۺ{7~x|_Ot5[?T{tW8qziGJw2MyN}$}0yi*In[{{q%{W$WvZN;N:Hِ*L?Lx80H0b7Ew+Q.P2V y2=hgnyaO@7u@d,6>g J%.70-ɒҏ@!$~.j I ”ijvzjռͿs, %•gRT5|Ԗe!.*-"*U|\gS#?Y0 W ׫ W)8Dֲ *5q4 ^&`Y4Wx+=rKB]$p}C,@>q&V.)`S&+p?wX$V/W+pŹPT^#XK&ѣAY#ԩ_2DD].ŵ0u(ѫ-l؄5"5uaJamh3cY qXQcԄ zUb\".Ջ캪1D6 CZx@- 3Qx3*: +-(lcdS1FU7e zX3{EN*~L76BGӻLdλReg^qLwaFPξ :ޙ(NT*%edݸ31ţGGos8[fm}Tu7FčCRqsߍLRS ˟/ާMuAwSJr{пZIRr$vZ_ pGm:j5 >j ~zŸ~ew?^5Ec$!sNspY#;tzYB T~oxФWQ۠X&]wsߋ۹lno7~ޏݳD#2I*>?OKw?:Kns0sƥ7i\b;JӞzҊ䁗DM/f쓟meq#l9U]y2g ?bx2i2SO# aCeTK5i/l }igk'WST-~#̟ R ^DZt{q&Sy| ( aAJV6AaE缹BIHdsܹAlO%a1Eozջo:kZr;##GD}7 >QdH ued95CgdiPcﷵGS!O.T2u|<#TM[rhsS-JjMM?(..8zMlUZ?˺x<p~ڠo7kS>뷊&f `~?6 v\~Gѹw~3Ͽ9tMm#7wz1|~sTgA.U2_II]m1PvߜmAUYΙ^==yK]kLS4hsmj{et~F(mWԼu}QLnmTL96ž._s8_4/N罣ปNKJ -U2ɕ[`+*D}7YI+B\_Hr\x,|˝X列XQcb`+\ 7AA0[LYDJAETMkm~Zq %cg5]P) KzťgxNǘN~(=NCԵtVo ƗnYX՗#2 CfuaȬ. E*O fH*A@ba,(Ո8 N% )҅8G 9dvn heJ[cv9frRBbPw(3LXLӵvrOnKfLq|rtV_Yyʲve_{Ӳc1O=A2[G?y'~v~LZOxg[ 9ꓴ7 d,pPn>^1UPzr567E#]9Uya+j9.34딕.#;ɟ=JE#um1;F(#[T("=+ E UiEe1>f͌RȓLvZ0eU"Pb> 8z`0d5 !a T@HU*L+<ߔl}6҃>4x0#ݮP@ DHhk 1 }0ECa4kaKay5ɪܾPKN~r9KrFi̳ #5s*lMPjgŝTc>+nyBۣBO&C,"wR加+Q$6>IHWns)n4~frGT|_v96.eN=l1:Vٛ{cVD.(;mI<'cٺ c.#YP)8,o2z؉T٢{_wBqfU%lQn㻆YqUédSNKqιg !A' Ԫ!l u@.z9K9.CQjz(n`GJDyG-n^J6[w7޷k-t()C" Gp\k.}3.%"a)iϷ|J2Xq>I3D{GA.<#!+(/(Gޣ؊AAhPex+!B.V[h=.GiǑ! 2`Jzr 3Iă,;ąx!8yx7lP?!&74ɥqÅɲr2~ɑu:6?2麟ڛ( "Mh+kOjzZjvalQڱi"+ƊA"y YZΟɧr'ɾMV ӶQfte dX ih Di{cƾSvTB͞蜑Y~w=X1pu/UrK}^b Dy(ɫel(ͣǡ 1T׋pn'8iEc)")Oy*Z"2]%,Xd[eeih&*cz+Y(y;~1@uT,{ K3o7&V&Vþm7!mdBʅƯkh`~dQ%1*J*zLpP:I5͉I)gA88Q)Ts'0CE;f)ł: 9L$^V\KAlXƄK䩣D>qTФ 5=rYaAhc#& xa> ~!# .0GX2 ĈZºV}q糷GuZ`3r,QuG; ߱ ) qeO3IN)V@j N9qhQǤA7SsE:1 91LhA(K ҩ I3Ɯ7:ЊüB%Ղ(UG8dņ!}MLHV僉 ԂMBZ8TeOvF/^.<]E)*]gHxò<|E$"2 #"d+AJR8Bk7 %gfy7(0׬3d>مIo[-nP׳cML'9Rt<[ƾ2cR;XiLRqKPù ݺ#pYH \ 뻩7㧰hvoթ 4;X) -:FyHc8ʣwKE]*\4qΏ:6FVwh4'jdX_cwK3J sĴ)L.%DO.5 (ݤsPӈH+{fuv9Rk0{X;kJ$D~LDq+4X׼~6u#aIscnPT&^މ{QWwmDU!mF8%F"cWBa cc,I1TSYjlF@,Lm߉.O0-u|0٦MK## 1ZBY T"06C/U" )X !B$=nG ! j7 ]pEk "$2: `1>kcfR h)h ̭B\T%'H?UaZLߢ1M9 PGP$aBQ򊥄!e8qTSQ~+ؕʥ:F2*Ajĥ:&904҆y9P,['0$0U]!V_~~z.K 4 aPY}eTE1scڗew'S:J}Itʋ-i11TR ƯD` " &T#[kՈtR",d#KD*9*O[H|d5uI)rB~1 U6qB׾M+{(U`*/XG`2ؾR՜=1Xd*F`!#B2ؾMPe (~{a({*U6 YVl&R ^`%E 1Zej)Ru"|1ڕ]CLCNvM>D&s|z7]68~Mƹ<ܼdY6DvӵO̗X?RWMYd {1 VwFF0W<;^w~Ά"\dE/_N~7+ U~7Aʧ^fȳ>?~d {I`F~7ֆ oYڰ?8YS .,?->oE?y?]/}n60~v:ԓ{|fΟus=Kw$// ${)Ó.'y9=Ο=x_v+>7 MV/2f- Y34?mo'3yx ;} *(*|czHW3Z3zO?ə'ӓͳޓerѠ@C>gRn\?3ccLCiޟO_ zq>'y^WvL^] N ~v/h4P3)x:>/::3SYiLFWuˠɮiMi7{iNۧhR Kbow S&2>eMpΙu3PyWdiE I]ɗLud!3.Z1{k)k1L{z3kYӂƄsW=Izww.6uL; S]HL z߆w ܨgL }ڑ&bV"/UĔ]8D*0EdHg}h-22(sd/zД-gރL2 "shPR>Y2?:+Zʞ/ {SMSAKBI!ږ ѿZ+df|y4Xh2crhN-q/m pվZyu>zO}dVfHY;f^"!jշ_Aۥ/r|0 _iRxsF89$o.+#󺛇rRS muFk77[cmI=O&<9|)8نw32^>0&C}{fy`+]ƭ⚫r'&.%Ocqn4q*8MX:F+^zi(%n7Fѿ`iKTжdMM漋%; Ѝg<<6>6m~H+,{?(v!R˃2M3yӤ5M GeHd,k" -u`JE8(kb, US1PkwmI_!eKYaupYȒBQjRGP  Y^]]5 ]۸Svdk1铻ߗOy3q|gE hl L`c`LVhM*]M:m62ԁnU꿾ˇL<;&ˬ=P|h}ȹ*bբO?|srсuQf(lj7[GjJANPFf^`"0A.GmrɃ%}W64ͽmN5fƭzlȁS$E:()b1-6*TR9Kg3 1 (,6̊v1DCf7qgE^fSRwsX1 ]2dwW"(ҷ\_o?~w4ekҹ qs.ѧޑY9x>;߹p7? 
dt -klP9Ʉ)X@X7CE*at( WV <[,\]ͻ.IzHh_?Ip;;v/o_͇\w~]rWU'0^={'kώ*I1U<iT15߅jnFuo~L59wq̻RVH#q g7b"Sy {|v O8:uX( %QX6hH2!HF.A &8p[QhqSVjOr>ǒu`9QEҌ^;RHNZ9 ȝ.Ev7Kړl0ǾQD&QPt7QNJf7!fZ-&p 0 F$(>~˥T$2L)q,8 JeL&[(h]!H?X`0 ;k hڣc(1O1g@I3cj(a-'cm!%A` jbrq~& úm7L[Vs4SC0Y`y9._MI.D`2]0 E?֪e)31VP$ vL289&`rZh!AvJc: =)HK%I^Yc5f&b" :.ߐTި 'eE%歆tQ^$QM'1 }l$HH(ޖrII|Fg&v"C _cѱ)BE7Xt9Qh$i5T*x; f7Mqڢȇ ,d @k]֛j2#Be4Ij W3kNGx.<]ɤ<=.Tu'{]Oe 2({]bŴkV;ş 5vEoono n#K>ڃP;Z7ٛ?ҨF^{^:NOjx0/M v9脮6 %3({_u}V+ ˖²;c?3G X  ]+6\ BU^6Zl.@(< z5jɡvkNFfSnV*B5JkO `;z4lvET\J</4"a]SmO uQ;E:Q@c? J'cPJe,.0#pYP8+A˖;E,~B'9q,HOAfK5IN$y8DdvΑ Df.f`Y+b!+Rbf:Qm8I?I1E(0MvvHJ4XA^Fg# vE,T%67ш*5&;7~!yHi2\GXZ:)=Z#/q:. \H%v4E c/E wSo+3o/&?|_(OKX׌W|=w{QAnB\Im x.7'lQfL4GJ:4=Ͽ 1% M4ħ.CMûyngSU`31MC}:VŔ6 ۚeRKxhI5P;$R80Dj9xȓi".橞DFmTmgIߡmiOVk'`FYu@YԤδq>ٗL%OШ24w%[6Q`ykz%7 IPk n9 ѐ2P!̼)IGB6TZhүc Oz zon=O4 cf{vN(k*jYPf!Um=J[ bcҀBڭ3 ] J9!RaNTYR+LO8 f,o^^'>Dnvoic&)]Ǥ]_]0=m Ɨ.f_^K[~bCrX&(&"׷i0Zg1 u]3)e[֟_~O>'c{[DwTM}"sNcCcl,T,9B 㕻߇k5x2܆U)nz60zqMm-ch>(XşMocuMw[V jq?)*6yq0ehoG:[ufX^>*8睹7NP<bkfsY?TɧͷNt>!lO'oyCh6x5K>xiEl h7 /N^p/~4f ďS8 o 3b{kq9E\0YBS؁#tE/O2zp;Q+KqsZ7'+r!lG/v!Dbc)$e<^ĚڢOj~rabˣm}_e%@Q-W3asZZO,iAȆE怖 ^ e]*~Ohӯ~f-6߷z=5/WW?r_sJS.ލ%ʶb߈VKwW 4%NZө18 0drlo5hJhp>IN7!%\!*DƘ C؇ AL F(1Ϸ}Ir,tJozGwku;(Sa|ѢLGpҕ1k0Leh.le _2T$L*#,^3sW:8!0mhS^Ԡoy&2#yRSC_`a ԁ55sF$*9UDב@Իn:TraLkT5T#uq_CP@ah5&A:PY+7kY5_nY%hKWOwMEZŏ4J9ilp"߳yK {yszU&D?~(+8>]\_}:=ƟҪ'} /uwuϧnNiUӟ(3t@J>^4~}1cE}@LޞBilg虸IaϸV>{V,O]'>u|o'ѯ^e3nq<]ݞwM\7NNhſ;dy:?s{qto֟h_ )n:7룺vP2-"G. {>6ņxduNZ?TypJzN([71q*zo^ _aqwH9u,Df03Cmwti@lPvuZm@_&mĆ=E ˨:=jQf3|W3/U7dg~^;*e*З5ӈH n$>'Nz 1=nST31=DOfנm,$B+I*{ =܈I*_p>91D^eS/,^ ViΥ^X2D#|DꅎƨCp1ZL z=Ka掜 9gC!~ܴNgOB0lb¹1Cqn9mO~ٍh'c\Fep)BCv֥j1N!z7Dt0jo`A;v &lˠdېb"{fB @^,]74.hf mM2ddB%3*=$ljޱL,vFn/eAi3P#ٺL(= 9cFBP m 4}H䆘ĚI%8!lKA׎v˷P:v|2<cڿl mIP*d(/ ċœR$%Z_5jZA嬆LߩDo+qKIԳ*n/0v2<y몼+Ԟ@*^w#.ﺲz;^X[Jw.6]2/5Y*aY] %qa4}F YáQdlo:JH=[!)B]33iCĉ2:5V FpE]/#&q8Wě ws9Ih2.7cq8Ċp1d>)RNh`G*JZv()gfqED-ԄGgUڻT#Sf b_K7R:B6뒊tQ3{:Ukj' 2ȃ1T'9R;*k#0DrQ: tJXKW_K<8-3G 0 ǼC4ov46I>@.WDc'auКm:pvѫ\/奉,Y@2WQ"%3\Õa[[wLL_妚omT{Ob^]lFsb*0tÜa 7Y9WLSgC/U"0ܕ짭Zfj. u~qbd ^@̠a@٪Z ~pn%XofڃΓc)FSInY hQu!(ZHw=r*R )Bz5df -׻OE*`zg~Bf*PΈ $m$( Oħ{ڼ.[2?r&1[N$/ٺ%LQN[$լ$:NRنO)F6=Umս?\z0z*Ah s_Jk3%~OS0k61{aoʀ4OmvqUCCU koľ^T'fTc;XS89IREغLU F=`T9?l1WG x^zh;BvI6A`|BU;&td9j)a3.)Sb|PzuN2v33BAz /SQ:;_jQDp`>OORɓ=OuÐq7 =f(i!}t]\V}!ڻ۪)ZUTRk 5H磌/͙L G5ڢ&TPs>kb85Ê/jj3-.q)/R2_UDx ,R\$‰ן &̈T~_|vǓٸ嗱Jdu#:3ĥtjZQ *kR gPƴShbr3 y ѢhRSJ̳7ͨI,+Үx c]}!Iuq/B*~|pE |jl`C$c4ŲR*@jYClECaj9?P^+4[ZMvkJ`&rˇ`x-/Y>I_Б8:CLsHʇ%__v޹X]ݚ?ՙЇxgq*[9vk&ڝq/l͉v^%@uەUmA<иKq*.VbBZG%XI[e&OFjf9 >G$ZT1>1 U '*WtpW,a֫'flh39=f+3t z^3Nb%8sh5""u>I-N[DNf0ѺfIӦWqnJssy3rkC^ײ4jgzB`Zm ̦)Z%/߬kHy)RBǬpڌgD=xh*lRH{[غV]Ռs=RMdȁV "%zrH;F2f,Y)Ԫ@S AXmsM ԁGFdt 5"0xzpi1i@A|P!Ff1>tF\bE]/X;tmY_n6/=pIg)`D;z[i?(_?ܿűM_~<y~y&+B[crjmAbd:ӟ(P1C%Q:x>I%>ŕP\Hy|t]ImK ʐm3&1_/]wKyZ}lkȴא6$ᗮ-_ܣ]zn:p!--N4E#_c,j\I')d΀h\dКR%f3gRWvY 4ӗM>Qšj7%uAhGҧRDn3H|HƠ0sA2J8+߄ N4C紸:w!4[['v6dc*"6+:66LݥإH)QB\Tvye7I <H]<:ۍ*,^\lģ,#*}%lƞE(X9V ,R-sZWH`,&-{5lA?ÇMH6W' ?Ek0߾Hm 2 5lLRu08O۵E $@.5!S9cf@A@ ]jZz[OvpIB{}lPŖ2D8wzUY(#\QL؟m:Z+Fq[Mȼ}%o* `_ޤ2;s`oT^B5aؿÂJEԥI. 
var/home/core/zuul-output/logs/kubelet.log
Jan 26 00:06:54 crc systemd[1]: Starting Kubernetes Kubelet...
Jan 26 00:06:54 crc restorecon[4690]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Jan 26 00:06:54 crc restorecon[4690]: 
/var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Jan 26 00:06:54 crc restorecon[4690]: 
/var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 26 
00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c440,c975 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:54 crc restorecon[4690]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 26 00:06:54 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc 
restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 
Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c968,c969 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 
26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 00:06:55 
crc restorecon[4690]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 00:06:55 
crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c377,c642 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 
00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 26 00:06:55 crc 
restorecon[4690]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 
00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 
00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc 
restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 00:06:55 crc restorecon[4690]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 26 00:06:55 crc restorecon[4690]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 26 00:06:55 crc restorecon[4690]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Jan 26 00:06:56 crc kubenswrapper[4975]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 26 00:06:56 crc kubenswrapper[4975]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Jan 26 00:06:56 crc kubenswrapper[4975]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 26 00:06:56 crc kubenswrapper[4975]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Jan 26 00:06:56 crc kubenswrapper[4975]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Jan 26 00:06:56 crc kubenswrapper[4975]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.005856 4975 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008236 4975 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008253 4975 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008258 4975 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008261 4975 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008265 4975 feature_gate.go:330] unrecognized feature gate: Example Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008269 4975 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008274 4975 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008280 4975 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008285 4975 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008288 4975 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008291 4975 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008295 4975 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008299 4975 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008302 4975 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008306 4975 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008309 4975 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008313 4975 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008317 4975 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008320 4975 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008324 4975 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008328 4975 feature_gate.go:330] unrecognized feature 
gate: NutanixMultiSubnets Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008332 4975 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008336 4975 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008339 4975 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008343 4975 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008346 4975 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008349 4975 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008353 4975 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008357 4975 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008360 4975 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008366 4975 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008371 4975 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008374 4975 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008379 4975 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008383 4975 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008387 4975 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008391 4975 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008394 4975 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008404 4975 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008409 4975 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008415 4975 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008420 4975 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008423 4975 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008427 4975 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008431 4975 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008434 4975 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008438 4975 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008441 4975 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008445 4975 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008448 4975 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008453 4975 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008457 4975 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008461 4975 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008464 4975 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008467 4975 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008471 4975 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008474 4975 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008478 4975 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008481 4975 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008484 4975 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008488 4975 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008491 4975 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008494 4975 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008498 4975 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008501 4975 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008505 4975 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008509 4975 
feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008513 4975 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008517 4975 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008521 4975 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.008526 4975 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008767 4975 flags.go:64] FLAG: --address="0.0.0.0" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008784 4975 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008795 4975 flags.go:64] FLAG: --anonymous-auth="true" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008801 4975 flags.go:64] FLAG: --application-metrics-count-limit="100" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008807 4975 flags.go:64] FLAG: --authentication-token-webhook="false" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008811 4975 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008816 4975 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008822 4975 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008827 4975 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008832 4975 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008837 4975 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008842 4975 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008847 4975 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008852 4975 flags.go:64] FLAG: --cgroup-root="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008857 4975 flags.go:64] FLAG: --cgroups-per-qos="true" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008862 4975 flags.go:64] FLAG: --client-ca-file="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008868 4975 flags.go:64] FLAG: --cloud-config="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008873 4975 flags.go:64] FLAG: --cloud-provider="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008878 4975 flags.go:64] FLAG: --cluster-dns="[]" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008884 4975 flags.go:64] FLAG: --cluster-domain="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008888 4975 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008892 4975 flags.go:64] FLAG: --config-dir="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008896 4975 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008901 4975 flags.go:64] FLAG: --container-log-max-files="5" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 
00:06:56.008906 4975 flags.go:64] FLAG: --container-log-max-size="10Mi" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008910 4975 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008914 4975 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008918 4975 flags.go:64] FLAG: --containerd-namespace="k8s.io" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008922 4975 flags.go:64] FLAG: --contention-profiling="false" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008926 4975 flags.go:64] FLAG: --cpu-cfs-quota="true" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008930 4975 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008935 4975 flags.go:64] FLAG: --cpu-manager-policy="none" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008939 4975 flags.go:64] FLAG: --cpu-manager-policy-options="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008944 4975 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008950 4975 flags.go:64] FLAG: --enable-controller-attach-detach="true" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008954 4975 flags.go:64] FLAG: --enable-debugging-handlers="true" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008958 4975 flags.go:64] FLAG: --enable-load-reader="false" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008962 4975 flags.go:64] FLAG: --enable-server="true" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008966 4975 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008972 4975 flags.go:64] FLAG: --event-burst="100" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008976 4975 flags.go:64] FLAG: --event-qps="50" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008980 4975 flags.go:64] FLAG: --event-storage-age-limit="default=0" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008984 4975 flags.go:64] FLAG: --event-storage-event-limit="default=0" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008988 4975 flags.go:64] FLAG: --eviction-hard="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008993 4975 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.008996 4975 flags.go:64] FLAG: --eviction-minimum-reclaim="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009001 4975 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009005 4975 flags.go:64] FLAG: --eviction-soft="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009009 4975 flags.go:64] FLAG: --eviction-soft-grace-period="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009012 4975 flags.go:64] FLAG: --exit-on-lock-contention="false" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009016 4975 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009020 4975 flags.go:64] FLAG: --experimental-mounter-path="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009025 4975 flags.go:64] FLAG: --fail-cgroupv1="false" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009028 4975 flags.go:64] FLAG: --fail-swap-on="true" Jan 26 00:06:56 crc 
kubenswrapper[4975]: I0126 00:06:56.009032 4975 flags.go:64] FLAG: --feature-gates="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009037 4975 flags.go:64] FLAG: --file-check-frequency="20s" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009041 4975 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009046 4975 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009050 4975 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009054 4975 flags.go:64] FLAG: --healthz-port="10248" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009058 4975 flags.go:64] FLAG: --help="false" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009062 4975 flags.go:64] FLAG: --hostname-override="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009067 4975 flags.go:64] FLAG: --housekeeping-interval="10s" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009071 4975 flags.go:64] FLAG: --http-check-frequency="20s" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009076 4975 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009079 4975 flags.go:64] FLAG: --image-credential-provider-config="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009083 4975 flags.go:64] FLAG: --image-gc-high-threshold="85" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009089 4975 flags.go:64] FLAG: --image-gc-low-threshold="80" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009093 4975 flags.go:64] FLAG: --image-service-endpoint="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009097 4975 flags.go:64] FLAG: --kernel-memcg-notification="false" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009101 4975 flags.go:64] FLAG: --kube-api-burst="100" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009106 4975 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009112 4975 flags.go:64] FLAG: --kube-api-qps="50" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009116 4975 flags.go:64] FLAG: --kube-reserved="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009120 4975 flags.go:64] FLAG: --kube-reserved-cgroup="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009123 4975 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009127 4975 flags.go:64] FLAG: --kubelet-cgroups="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009132 4975 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009136 4975 flags.go:64] FLAG: --lock-file="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009140 4975 flags.go:64] FLAG: --log-cadvisor-usage="false" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009145 4975 flags.go:64] FLAG: --log-flush-frequency="5s" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009149 4975 flags.go:64] FLAG: --log-json-info-buffer-size="0" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009156 4975 flags.go:64] FLAG: --log-json-split-stream="false" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009160 4975 flags.go:64] FLAG: --log-text-info-buffer-size="0" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009165 
4975 flags.go:64] FLAG: --log-text-split-stream="false" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009169 4975 flags.go:64] FLAG: --logging-format="text" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009173 4975 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009178 4975 flags.go:64] FLAG: --make-iptables-util-chains="true" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009182 4975 flags.go:64] FLAG: --manifest-url="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009186 4975 flags.go:64] FLAG: --manifest-url-header="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009192 4975 flags.go:64] FLAG: --max-housekeeping-interval="15s" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009196 4975 flags.go:64] FLAG: --max-open-files="1000000" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009201 4975 flags.go:64] FLAG: --max-pods="110" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009206 4975 flags.go:64] FLAG: --maximum-dead-containers="-1" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009211 4975 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009215 4975 flags.go:64] FLAG: --memory-manager-policy="None" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009219 4975 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009223 4975 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009228 4975 flags.go:64] FLAG: --node-ip="192.168.126.11" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009232 4975 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009242 4975 flags.go:64] FLAG: --node-status-max-images="50" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009246 4975 flags.go:64] FLAG: --node-status-update-frequency="10s" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009250 4975 flags.go:64] FLAG: --oom-score-adj="-999" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009254 4975 flags.go:64] FLAG: --pod-cidr="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009258 4975 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009265 4975 flags.go:64] FLAG: --pod-manifest-path="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009270 4975 flags.go:64] FLAG: --pod-max-pids="-1" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009274 4975 flags.go:64] FLAG: --pods-per-core="0" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009279 4975 flags.go:64] FLAG: --port="10250" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009283 4975 flags.go:64] FLAG: --protect-kernel-defaults="false" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009287 4975 flags.go:64] FLAG: --provider-id="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009291 4975 flags.go:64] FLAG: --qos-reserved="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009296 4975 flags.go:64] FLAG: --read-only-port="10255" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009300 4975 
flags.go:64] FLAG: --register-node="true" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009304 4975 flags.go:64] FLAG: --register-schedulable="true" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009308 4975 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009316 4975 flags.go:64] FLAG: --registry-burst="10" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009320 4975 flags.go:64] FLAG: --registry-qps="5" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009324 4975 flags.go:64] FLAG: --reserved-cpus="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009328 4975 flags.go:64] FLAG: --reserved-memory="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009333 4975 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009338 4975 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009342 4975 flags.go:64] FLAG: --rotate-certificates="false" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009347 4975 flags.go:64] FLAG: --rotate-server-certificates="false" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009351 4975 flags.go:64] FLAG: --runonce="false" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009356 4975 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009361 4975 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009365 4975 flags.go:64] FLAG: --seccomp-default="false" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009370 4975 flags.go:64] FLAG: --serialize-image-pulls="true" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009374 4975 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009378 4975 flags.go:64] FLAG: --storage-driver-db="cadvisor" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009382 4975 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009386 4975 flags.go:64] FLAG: --storage-driver-password="root" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009390 4975 flags.go:64] FLAG: --storage-driver-secure="false" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009395 4975 flags.go:64] FLAG: --storage-driver-table="stats" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009398 4975 flags.go:64] FLAG: --storage-driver-user="root" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009402 4975 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009406 4975 flags.go:64] FLAG: --sync-frequency="1m0s" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009410 4975 flags.go:64] FLAG: --system-cgroups="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009414 4975 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009421 4975 flags.go:64] FLAG: --system-reserved-cgroup="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009424 4975 flags.go:64] FLAG: --tls-cert-file="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009436 4975 flags.go:64] FLAG: --tls-cipher-suites="[]" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009441 
4975 flags.go:64] FLAG: --tls-min-version="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009445 4975 flags.go:64] FLAG: --tls-private-key-file="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009450 4975 flags.go:64] FLAG: --topology-manager-policy="none" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009454 4975 flags.go:64] FLAG: --topology-manager-policy-options="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009458 4975 flags.go:64] FLAG: --topology-manager-scope="container" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009462 4975 flags.go:64] FLAG: --v="2" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009472 4975 flags.go:64] FLAG: --version="false" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009477 4975 flags.go:64] FLAG: --vmodule="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009482 4975 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.009486 4975 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009584 4975 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009589 4975 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009593 4975 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009598 4975 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009603 4975 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009607 4975 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009612 4975 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009622 4975 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009626 4975 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009630 4975 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009633 4975 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009637 4975 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009640 4975 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009644 4975 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009648 4975 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009651 4975 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009655 4975 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009658 4975 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009662 4975 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009665 4975 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009669 4975 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009672 4975 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009676 4975 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009680 4975 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009685 4975 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009690 4975 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009694 4975 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009698 4975 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009702 4975 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009705 4975 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009709 4975 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009713 4975 feature_gate.go:330] unrecognized feature gate: Example Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009717 4975 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009721 4975 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009725 4975 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009744 4975 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009748 4975 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009752 4975 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009756 4975 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009761 4975 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009766 4975 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009769 4975 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009773 4975 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009777 4975 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009782 4975 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009786 4975 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009790 4975 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009794 4975 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009799 4975 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009803 4975 feature_gate.go:330] unrecognized 
feature gate: PersistentIPsForVirtualization Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009807 4975 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009812 4975 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009815 4975 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009820 4975 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009824 4975 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009828 4975 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009832 4975 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009836 4975 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009842 4975 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009848 4975 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009853 4975 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009858 4975 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009862 4975 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009866 4975 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
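Mixed in with the unknown-gate noise, feature_gate.go:351/:353 also reports the gates that were explicitly forced even though they are already GA or deprecated (DisableKubeletCloudCredentialProviders, CloudDualStackNodeIPs, and ValidatingAdmissionPolicy above; KMSv1 further down). Under the same assumption of raw journal text, a small sketch that collects those overrides:

import re

# "Setting GA feature gate Foo=true." / "Setting deprecated feature gate Bar=true."
OVERRIDE_RE = re.compile(r"Setting (GA|deprecated) feature gate (\w+)=(true|false)")

def forced_gates(log_text):
    """Map gate name -> (maturity, bool) for explicitly-set GA/deprecated gates."""
    return {
        name: (maturity, value == "true")
        for maturity, name, value in OVERRIDE_RE.findall(log_text)
    }

# Over these entries this yields:
#   {'DisableKubeletCloudCredentialProviders': ('GA', True),
#    'CloudDualStackNodeIPs': ('GA', True),
#    'ValidatingAdmissionPolicy': ('GA', True),
#    'KMSv1': ('deprecated', True)}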
Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009870 4975 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009875 4975 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009879 4975 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009883 4975 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009887 4975 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009891 4975 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.009894 4975 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.010079 4975 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.020303 4975 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.020340 4975 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020517 4975 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020534 4975 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020540 4975 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020546 4975 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020552 4975 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020558 4975 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020564 4975 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020574 4975 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020579 4975 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020585 4975 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020590 4975 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020596 4975 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020601 4975 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 
26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020609 4975 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020619 4975 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020626 4975 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020634 4975 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020641 4975 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020649 4975 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020669 4975 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020682 4975 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020690 4975 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020697 4975 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020703 4975 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020709 4975 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020714 4975 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020721 4975 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020726 4975 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020757 4975 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020763 4975 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020770 4975 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020779 4975 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020787 4975 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020795 4975 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020802 4975 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020807 4975 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020813 4975 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020819 4975 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020826 4975 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020832 4975 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020837 4975 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020843 4975 feature_gate.go:330] unrecognized feature gate: Example Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020849 4975 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020858 4975 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020863 4975 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020869 4975 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020874 4975 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020880 4975 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020885 4975 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020890 4975 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020895 4975 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020901 4975 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020907 4975 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020912 4975 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020917 4975 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020923 4975 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020932 4975 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020937 4975 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020943 4975 feature_gate.go:330] 
unrecognized feature gate: VSphereDriverConfiguration Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020948 4975 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020955 4975 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020960 4975 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020966 4975 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020971 4975 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020977 4975 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020982 4975 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020988 4975 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020993 4975 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.020999 4975 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021021 4975 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021028 4975 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.021037 4975 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021331 4975 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021341 4975 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021347 4975 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021352 4975 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021358 4975 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021363 4975 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021369 4975 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021374 4975 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021383 4975 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 26 
00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021388 4975 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021394 4975 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021399 4975 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021404 4975 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021410 4975 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021415 4975 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021420 4975 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021425 4975 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021431 4975 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021436 4975 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021442 4975 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021448 4975 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021456 4975 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021462 4975 feature_gate.go:330] unrecognized feature gate: Example Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021467 4975 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021472 4975 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021478 4975 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021483 4975 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021490 4975 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
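By this point the same warning block has been printed three times within a few milliseconds (the .009*, .020*, and .021* passes), once per re-parse of the gate configuration; only the timestamps and ordering differ. When reading a capture like this it is often easier to collapse the repeats first. A sketch, assuming entries is a hypothetical list with one journald line per element in the prefix format shown here:

import re
from collections import Counter

# journald prefix: "Jan 26 00:06:56 crc kubenswrapper[4975]: "
# klog header:     "W0126 00:06:56.021490    4975 feature_gate.go:330] "
PREFIX_RE = re.compile(
    r"^\w{3} {1,2}\d{1,2} [\d:]{8} \S+ \S+\[\d+\]: "
    r"(?:[IWEF]\d{4} [\d:.]+\s+\d+ \S+:\d+\] )?"
)

def message_counts(entries):
    """Count identical messages once the journald and klog prefixes are stripped."""
    return Counter(PREFIX_RE.sub("", entry) for entry in entries)

# Over this section most "unrecognized feature gate: ..." messages show up with a
# count of three (a handful, such as NewOLM, only appear in the later passes).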
Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021498 4975 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021504 4975 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021510 4975 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021529 4975 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021535 4975 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021544 4975 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021549 4975 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021555 4975 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021560 4975 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021567 4975 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021574 4975 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021581 4975 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021588 4975 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021594 4975 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021600 4975 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021607 4975 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021614 4975 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021623 4975 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021628 4975 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021634 4975 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021640 4975 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021647 4975 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021654 4975 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021661 4975 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021668 4975 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 
26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021676 4975 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021684 4975 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021692 4975 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021697 4975 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021703 4975 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021712 4975 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021717 4975 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021722 4975 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021728 4975 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021752 4975 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021758 4975 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021763 4975 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021769 4975 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021774 4975 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021779 4975 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021785 4975 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021790 4975 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.021799 4975 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.021809 4975 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.022096 4975 server.go:940] "Client rotation is on, will bootstrap in background" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.026168 4975 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.026269 4975 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
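Each parsing pass ends with the effective gate map at feature_gate.go:386, rendered in Go map syntax; it is identical in all three passes here (KMSv1 and the three GA gates on, everything else off). A sketch that turns that one entry into a Python dict, assuming the exact {map[Key:bool ...]} rendering shown above:

import re

MAP_RE = re.compile(r"feature gates: \{map\[([^\]]*)\]\}")

def effective_gates(entry):
    """Parse a 'feature gates: {map[Key:bool ...]}' entry into {name: bool}."""
    m = MAP_RE.search(entry)
    if not m:
        return {}
    gates = {}
    for pair in m.group(1).split():
        name, _, value = pair.partition(":")
        gates[name] = (value == "true")
    return gates

# On the entries above this yields, among others:
#   {'CloudDualStackNodeIPs': True, 'KMSv1': True, 'NodeSwap': False,
#    'ValidatingAdmissionPolicy': True, 'VolumeAttributesClass': False, ...}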
Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.026895 4975 server.go:997] "Starting client certificate rotation" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.026922 4975 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.027435 4975 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-12 04:54:14.358505546 +0000 UTC Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.027539 4975 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.032560 4975 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 26 00:06:56 crc kubenswrapper[4975]: E0126 00:06:56.034873 4975 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.193:6443: connect: connection refused" logger="UnhandledError" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.035036 4975 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.042542 4975 log.go:25] "Validated CRI v1 runtime API" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.059595 4975 log.go:25] "Validated CRI v1 image API" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.060900 4975 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.063574 4975 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-01-26-00-02-27-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.063625 4975 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:41 fsType:tmpfs blockSize:0}] Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.086367 4975 manager.go:217] Machine: {Timestamp:2026-01-26 00:06:56.084546027 +0000 UTC m=+0.205751571 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:53ded227-eb06-4731-8131-8760124d118b BootID:f710b43a-30a9-4ff9-8d9b-11cb2688597c Filesystems:[{Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:41 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 
Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:c0:19:90 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:c0:19:90 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:7b:59:b9 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:1d:02:06 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:26:31:3e Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:e8:6d:fd Speed:-1 Mtu:1496} {Name:eth10 MacAddress:6e:9d:07:84:cd:1a Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:12:e4:c1:f0:14:bf Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 
Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.086663 4975 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.086908 4975 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.087687 4975 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.087947 4975 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.088017 4975 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.088518 4975 topology_manager.go:138] "Creating topology manager with none policy" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.088544 4975 
container_manager_linux.go:303] "Creating device plugin manager" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.088839 4975 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.088899 4975 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.089374 4975 state_mem.go:36] "Initialized new in-memory state store" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.089627 4975 server.go:1245] "Using root directory" path="/var/lib/kubelet" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.090830 4975 kubelet.go:418] "Attempting to sync node with API server" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.090863 4975 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.090911 4975 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.090933 4975 kubelet.go:324] "Adding apiserver pod source" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.090947 4975 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.092726 4975 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.092790 4975 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.193:6443: connect: connection refused Jan 26 00:06:56 crc kubenswrapper[4975]: E0126 00:06:56.092901 4975 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.193:6443: connect: connection refused" logger="UnhandledError" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.093052 4975 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
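A few entries up, container_manager_linux.go:272 dumps the node config as a JSON object: systemd cgroup driver, 200m CPU / 350Mi memory system-reserved, a 4096 pod PID limit, and five hard eviction thresholds. Because that object nests braces, a plain regex is awkward; a raw JSON decode starting at the nodeConfig= marker is simpler. A sketch under that assumption:

import json

def node_config(entry):
    """Extract the JSON object that follows 'nodeConfig=' in a container-manager entry."""
    marker = "nodeConfig="
    start = entry.find(marker)
    if start < 0:
        return None
    obj, _ = json.JSONDecoder().raw_decode(entry, start + len(marker))
    return obj

# Against the entry above:
#   cfg = node_config(entry)
#   cfg["CgroupDriver"]                      -> "systemd"
#   cfg["SystemReserved"]["memory"]          -> "350Mi"
#   [t["Signal"] for t in cfg["HardEvictionThresholds"]]
#   -> ['nodefs.available', 'nodefs.inodesFree', 'imagefs.available',
#       'imagefs.inodesFree', 'memory.available']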
Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.093023 4975 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.193:6443: connect: connection refused Jan 26 00:06:56 crc kubenswrapper[4975]: E0126 00:06:56.093135 4975 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.193:6443: connect: connection refused" logger="UnhandledError" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.093940 4975 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.094488 4975 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.094514 4975 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.094524 4975 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.094540 4975 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.094557 4975 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.094565 4975 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.094574 4975 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.094588 4975 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.094598 4975 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.094607 4975 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.094646 4975 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.094655 4975 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.094867 4975 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.095266 4975 server.go:1280] "Started kubelet" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.095312 4975 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.193:6443: connect: connection refused Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.096044 4975 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.096044 4975 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.097001 4975 
server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Jan 26 00:06:56 crc systemd[1]: Started Kubernetes Kubelet. Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.097757 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.097787 4975 server.go:460] "Adding debug handlers to kubelet server" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.097966 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 04:54:02.112366457 +0000 UTC Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.097787 4975 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Jan 26 00:06:56 crc kubenswrapper[4975]: E0126 00:06:56.098350 4975 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.098582 4975 volume_manager.go:287] "The desired_state_of_world populator starts" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.098602 4975 volume_manager.go:289] "Starting Kubelet Volume Manager" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.098675 4975 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Jan 26 00:06:56 crc kubenswrapper[4975]: E0126 00:06:56.098983 4975 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.193:6443: connect: connection refused" interval="200ms" Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.099016 4975 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.193:6443: connect: connection refused Jan 26 00:06:56 crc kubenswrapper[4975]: E0126 00:06:56.099074 4975 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.193:6443: connect: connection refused" logger="UnhandledError" Jan 26 00:06:56 crc kubenswrapper[4975]: E0126 00:06:56.097855 4975 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.193:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188e1f34ffe6955d default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-26 00:06:56.095245661 +0000 UTC m=+0.216451175,LastTimestamp:2026-01-26 00:06:56.095245661 +0000 UTC m=+0.216451175,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.100378 4975 factory.go:55] Registering systemd factory Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.100575 4975 factory.go:221] Registration of the systemd 
container factory successfully Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.101062 4975 factory.go:153] Registering CRI-O factory Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.101084 4975 factory.go:221] Registration of the crio container factory successfully Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.101140 4975 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.101160 4975 factory.go:103] Registering Raw factory Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.101174 4975 manager.go:1196] Started watching for new ooms in manager Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.101882 4975 manager.go:319] Starting recovery of all containers Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.121646 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122001 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122032 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122050 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122078 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122119 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122248 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122267 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122282 4975 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122316 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122332 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122346 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122393 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122410 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122421 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122432 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122447 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122459 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122471 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122482 4975 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122495 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122508 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122520 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122534 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122561 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122573 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122586 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122599 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122612 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122624 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122637 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122648 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122662 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122678 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122691 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122707 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122722 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122752 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122765 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122777 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122840 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122852 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122865 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122933 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122945 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122958 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122970 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.122983 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123001 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123014 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123026 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123060 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123079 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123092 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123105 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123120 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123133 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123145 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123157 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123169 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123186 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123249 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123267 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123284 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123300 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123311 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123323 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123337 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123354 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123366 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123379 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123417 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123454 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123468 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123480 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123495 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123507 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123524 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123539 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123551 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123568 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123584 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123597 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123609 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123622 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123634 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123646 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123657 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123670 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123683 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123696 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123708 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123719 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123745 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123763 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123775 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123792 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123809 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123822 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123833 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123846 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123877 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123890 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123902 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123896 4975 manager.go:324] Recovery completed Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123925 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123957 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123975 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.123986 4975 reconstruct.go:130] 
"Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.124003 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.124018 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.124040 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.124054 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.124067 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.124080 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.124780 4975 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.124830 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.125228 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.125252 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" 
volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.125468 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126091 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126255 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126286 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126305 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126346 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126389 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126405 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126425 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126440 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126486 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126511 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126536 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126549 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126563 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126576 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126591 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126628 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126656 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126669 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126682 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126694 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" 
volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126717 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126758 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126776 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126811 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126833 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126849 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126862 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126874 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126887 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126922 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126945 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.126991 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127016 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127029 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127047 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127064 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127076 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127086 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127101 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127115 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127141 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127165 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" 
volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127207 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127221 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127233 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127264 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127277 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127294 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127310 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127323 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127337 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127358 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127372 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" 
volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127385 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127399 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127411 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127428 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127440 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127452 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127464 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127476 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127489 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127509 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127536 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127553 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127572 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127586 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127610 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127622 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127634 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127645 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127663 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127691 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127708 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127748 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127761 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127773 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127784 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127796 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127839 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127868 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127880 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127896 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127909 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127920 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127931 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" 
volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.127942 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.128294 4975 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.128313 4975 reconstruct.go:97] "Volume reconstruction finished" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.128322 4975 reconciler.go:26] "Reconciler: start to sync state" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.134717 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.136234 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.136270 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.136285 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.136853 4975 cpu_manager.go:225] "Starting CPU manager" policy="none" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.136872 4975 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.136893 4975 state_mem.go:36] "Initialized new in-memory state store" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.144131 4975 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.145303 4975 policy_none.go:49] "None policy: Start" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.145921 4975 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.145958 4975 status_manager.go:217] "Starting to sync pod status with apiserver" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.145984 4975 kubelet.go:2335] "Starting kubelet main sync loop" Jan 26 00:06:56 crc kubenswrapper[4975]: E0126 00:06:56.146024 4975 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.147003 4975 memory_manager.go:170] "Starting memorymanager" policy="None" Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.147003 4975 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.193:6443: connect: connection refused Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.147053 4975 state_mem.go:35] "Initializing new in-memory state store" Jan 26 00:06:56 crc kubenswrapper[4975]: E0126 00:06:56.147061 4975 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.193:6443: connect: connection refused" logger="UnhandledError" Jan 26 00:06:56 crc kubenswrapper[4975]: E0126 00:06:56.199517 4975 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.201698 4975 manager.go:334] "Starting Device Plugin manager" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.201768 4975 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.201780 4975 server.go:79] "Starting device plugin registration server" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.202188 4975 eviction_manager.go:189] "Eviction manager: starting control loop" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.202205 4975 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.202346 4975 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.202575 4975 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.202590 4975 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Jan 26 00:06:56 crc kubenswrapper[4975]: E0126 00:06:56.209453 4975 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.246325 4975 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.246416 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller 
attach/detach" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.247376 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.247419 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.247431 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.248656 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.249444 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.249997 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.250183 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.250238 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.250252 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.250452 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.250627 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.250672 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.251034 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.251149 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.251226 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.251468 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.251498 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.251512 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.251655 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.251818 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.251862 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.252670 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.252694 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.252718 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.252699 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.252789 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.252757 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.253035 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.253037 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.253184 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.253248 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.253128 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.253416 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.254253 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.254285 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.254299 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.254399 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.254443 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.254453 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.254474 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.254504 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.255165 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.255186 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.255196 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:56 crc kubenswrapper[4975]: E0126 00:06:56.299758 4975 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.193:6443: connect: connection refused" interval="400ms" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.302327 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.303406 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.303425 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.303433 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.303447 4975 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 26 00:06:56 crc kubenswrapper[4975]: E0126 00:06:56.303661 4975 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.193:6443: connect: connection refused" node="crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.330513 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.330580 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.330626 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.330669 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: 
\"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.330711 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.330790 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.330844 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.330865 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.330881 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.330907 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.330922 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.330936 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.330969 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: 
\"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.331021 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.331086 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.431722 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.431796 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.431817 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.431839 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.431864 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.431884 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.431905 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 
00:06:56.431926 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.431945 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.431965 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.431984 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.432005 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.432023 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.432041 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.432062 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.432072 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.432086 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 00:06:56 crc 
kubenswrapper[4975]: I0126 00:06:56.432147 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.432170 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.432200 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.432191 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.432272 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.432305 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.432237 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.432343 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.432340 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.432345 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: 
\"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.432345 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.432381 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.432322 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.504040 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.505207 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.505272 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.505291 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.505327 4975 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 26 00:06:56 crc kubenswrapper[4975]: E0126 00:06:56.506093 4975 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.193:6443: connect: connection refused" node="crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.579120 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.600306 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-0b64997d4ff48a23882f7c103b5eaef60cd046637b3d215e8628d8ae5275fdc4 WatchSource:0}: Error finding container 0b64997d4ff48a23882f7c103b5eaef60cd046637b3d215e8628d8ae5275fdc4: Status 404 returned error can't find the container with id 0b64997d4ff48a23882f7c103b5eaef60cd046637b3d215e8628d8ae5275fdc4 Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.601504 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.612934 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.626300 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-1fe27796f9b44616d8362f5f69c5e0e3451477bd4910aa552b703ce21f1973a5 WatchSource:0}: Error finding container 1fe27796f9b44616d8362f5f69c5e0e3451477bd4910aa552b703ce21f1973a5: Status 404 returned error can't find the container with id 1fe27796f9b44616d8362f5f69c5e0e3451477bd4910aa552b703ce21f1973a5 Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.627103 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.628750 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-b34904a046b79fa6f438e94c532fe16f61487036d25aeab705752f554a963dee WatchSource:0}: Error finding container b34904a046b79fa6f438e94c532fe16f61487036d25aeab705752f554a963dee: Status 404 returned error can't find the container with id b34904a046b79fa6f438e94c532fe16f61487036d25aeab705752f554a963dee Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.634501 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.642020 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-5401cc259f2457b8f6737b65240ff9863aa6ec6312a48851db328a2d97f14cb0 WatchSource:0}: Error finding container 5401cc259f2457b8f6737b65240ff9863aa6ec6312a48851db328a2d97f14cb0: Status 404 returned error can't find the container with id 5401cc259f2457b8f6737b65240ff9863aa6ec6312a48851db328a2d97f14cb0 Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.652425 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-757c04822b72c605e586caacedd89bd5d02c52984904bcd144e0502ae11ecd1c WatchSource:0}: Error finding container 757c04822b72c605e586caacedd89bd5d02c52984904bcd144e0502ae11ecd1c: Status 404 returned error can't find the container with id 757c04822b72c605e586caacedd89bd5d02c52984904bcd144e0502ae11ecd1c Jan 26 00:06:56 crc kubenswrapper[4975]: E0126 00:06:56.700934 4975 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.193:6443: connect: connection refused" interval="800ms" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.906688 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.908659 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.908692 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.908700 4975 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:56 crc kubenswrapper[4975]: I0126 00:06:56.908720 4975 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 26 00:06:56 crc kubenswrapper[4975]: E0126 00:06:56.909118 4975 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.193:6443: connect: connection refused" node="crc" Jan 26 00:06:56 crc kubenswrapper[4975]: W0126 00:06:56.980065 4975 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.193:6443: connect: connection refused Jan 26 00:06:56 crc kubenswrapper[4975]: E0126 00:06:56.980141 4975 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.193:6443: connect: connection refused" logger="UnhandledError" Jan 26 00:06:57 crc kubenswrapper[4975]: W0126 00:06:57.063895 4975 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.193:6443: connect: connection refused Jan 26 00:06:57 crc kubenswrapper[4975]: E0126 00:06:57.063967 4975 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.193:6443: connect: connection refused" logger="UnhandledError" Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.096062 4975 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.193:6443: connect: connection refused Jan 26 00:06:57 crc kubenswrapper[4975]: W0126 00:06:57.097665 4975 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.193:6443: connect: connection refused Jan 26 00:06:57 crc kubenswrapper[4975]: E0126 00:06:57.097779 4975 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.193:6443: connect: connection refused" logger="UnhandledError" Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.098666 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 01:28:51.674337534 +0000 UTC Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.152997 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9"} Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.153117 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5401cc259f2457b8f6737b65240ff9863aa6ec6312a48851db328a2d97f14cb0"} Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.155013 4975 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245" exitCode=0 Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.155055 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245"} Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.155093 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b34904a046b79fa6f438e94c532fe16f61487036d25aeab705752f554a963dee"} Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.155191 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.155950 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.155982 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.155993 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.157345 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.157509 4975 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc" exitCode=0 Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.157562 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc"} Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.157580 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"1fe27796f9b44616d8362f5f69c5e0e3451477bd4910aa552b703ce21f1973a5"} Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.157651 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.157927 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.157946 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.157958 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.158514 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.158541 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.158551 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.159523 4975 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="e370bf64b79a5bbd56ecd85c893e974f4e4127131fc4af7817bd3ccc33477fae" exitCode=0 Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.159593 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"e370bf64b79a5bbd56ecd85c893e974f4e4127131fc4af7817bd3ccc33477fae"} Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.159610 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"0b64997d4ff48a23882f7c103b5eaef60cd046637b3d215e8628d8ae5275fdc4"} Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.159673 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.160300 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.160314 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.160321 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.160980 4975 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="1ec892414d32222aaf5d9ce150144c5ab03b998e767f864644c5307df68d4362" exitCode=0 Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.160998 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"1ec892414d32222aaf5d9ce150144c5ab03b998e767f864644c5307df68d4362"} Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.161011 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"757c04822b72c605e586caacedd89bd5d02c52984904bcd144e0502ae11ecd1c"} Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.161060 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.161558 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 
26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.161571 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.161578 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:57 crc kubenswrapper[4975]: E0126 00:06:57.501798 4975 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.193:6443: connect: connection refused" interval="1.6s" Jan 26 00:06:57 crc kubenswrapper[4975]: W0126 00:06:57.593570 4975 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.193:6443: connect: connection refused Jan 26 00:06:57 crc kubenswrapper[4975]: E0126 00:06:57.593646 4975 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.193:6443: connect: connection refused" logger="UnhandledError" Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.709584 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.710759 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.710796 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.710807 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:57 crc kubenswrapper[4975]: I0126 00:06:57.710834 4975 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.099811 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 05:25:53.770100071 +0000 UTC Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.164346 4975 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a" exitCode=0 Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.164411 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a"} Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.164548 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.165614 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.165650 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:58 
crc kubenswrapper[4975]: I0126 00:06:58.165663 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.166023 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"a238b460df2b16ee264cabce67c7af7588ea471a73cae2dceb4ee1705ec9518d"} Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.166131 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.167312 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.167351 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.167365 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.168572 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"54540a6b1af04f2f0bd1edade567e1983994da0fee4bb985455dc1c63be1f377"} Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.168602 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"3e3b636746caa52fd748950983c7200d4c866f61d7d1f4bccce6bd0bc78f379e"} Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.168615 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"343687e4b9df509c30607aa4077e5e9a25d8f285e0c6223fc63fa53bfa8a194b"} Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.168689 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.173125 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.173155 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.173167 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.176938 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677"} Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.176970 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664"} Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 
00:06:58.176984 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe"} Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.177063 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.177804 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.177829 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.177840 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.183135 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66"} Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.183165 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0"} Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.183177 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210"} Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.183189 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728"} Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.183200 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e"} Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.183296 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.183768 4975 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.184710 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.184760 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.184772 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:58 crc kubenswrapper[4975]: I0126 00:06:58.587526 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 00:06:59 crc kubenswrapper[4975]: I0126 00:06:59.100787 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 14:45:53.366694229 +0000 UTC Jan 26 00:06:59 crc kubenswrapper[4975]: I0126 00:06:59.188448 4975 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d" exitCode=0 Jan 26 00:06:59 crc kubenswrapper[4975]: I0126 00:06:59.188563 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:59 crc kubenswrapper[4975]: I0126 00:06:59.188979 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d"} Jan 26 00:06:59 crc kubenswrapper[4975]: I0126 00:06:59.189096 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:06:59 crc kubenswrapper[4975]: I0126 00:06:59.189443 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:59 crc kubenswrapper[4975]: I0126 00:06:59.189473 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:59 crc kubenswrapper[4975]: I0126 00:06:59.189482 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:06:59 crc kubenswrapper[4975]: I0126 00:06:59.190507 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:06:59 crc kubenswrapper[4975]: I0126 00:06:59.190547 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:06:59 crc kubenswrapper[4975]: I0126 00:06:59.190556 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:00 crc kubenswrapper[4975]: I0126 00:07:00.101073 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 07:53:48.82452238 +0000 UTC Jan 26 00:07:00 crc kubenswrapper[4975]: I0126 00:07:00.193919 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:07:00 crc kubenswrapper[4975]: I0126 00:07:00.193920 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a"} Jan 26 00:07:00 crc kubenswrapper[4975]: I0126 00:07:00.193964 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e"} Jan 26 00:07:00 crc kubenswrapper[4975]: I0126 00:07:00.193987 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17"} Jan 26 00:07:00 crc 
kubenswrapper[4975]: I0126 00:07:00.194004 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1"} Jan 26 00:07:00 crc kubenswrapper[4975]: I0126 00:07:00.194752 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:00 crc kubenswrapper[4975]: I0126 00:07:00.194792 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:00 crc kubenswrapper[4975]: I0126 00:07:00.194802 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:00 crc kubenswrapper[4975]: I0126 00:07:00.220546 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 00:07:00 crc kubenswrapper[4975]: I0126 00:07:00.220702 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:07:00 crc kubenswrapper[4975]: I0126 00:07:00.221526 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:00 crc kubenswrapper[4975]: I0126 00:07:00.221556 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:00 crc kubenswrapper[4975]: I0126 00:07:00.221570 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:00 crc kubenswrapper[4975]: I0126 00:07:00.276794 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 00:07:01 crc kubenswrapper[4975]: I0126 00:07:01.101511 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 17:05:03.898717778 +0000 UTC Jan 26 00:07:01 crc kubenswrapper[4975]: I0126 00:07:01.202451 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:07:01 crc kubenswrapper[4975]: I0126 00:07:01.202506 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:07:01 crc kubenswrapper[4975]: I0126 00:07:01.202437 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611"} Jan 26 00:07:01 crc kubenswrapper[4975]: I0126 00:07:01.203894 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:01 crc kubenswrapper[4975]: I0126 00:07:01.203961 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:01 crc kubenswrapper[4975]: I0126 00:07:01.203961 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:01 crc kubenswrapper[4975]: I0126 00:07:01.204017 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:01 crc kubenswrapper[4975]: I0126 00:07:01.204041 4975 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:01 crc kubenswrapper[4975]: I0126 00:07:01.203977 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:01 crc kubenswrapper[4975]: I0126 00:07:01.568517 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:07:01 crc kubenswrapper[4975]: I0126 00:07:01.568676 4975 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 26 00:07:01 crc kubenswrapper[4975]: I0126 00:07:01.568719 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:07:01 crc kubenswrapper[4975]: I0126 00:07:01.569902 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:01 crc kubenswrapper[4975]: I0126 00:07:01.569974 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:01 crc kubenswrapper[4975]: I0126 00:07:01.569995 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:01 crc kubenswrapper[4975]: I0126 00:07:01.588636 4975 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 26 00:07:01 crc kubenswrapper[4975]: I0126 00:07:01.588692 4975 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 26 00:07:02 crc kubenswrapper[4975]: I0126 00:07:02.013894 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:07:02 crc kubenswrapper[4975]: I0126 00:07:02.102618 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 22:43:53.611777403 +0000 UTC Jan 26 00:07:02 crc kubenswrapper[4975]: I0126 00:07:02.102859 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:07:02 crc kubenswrapper[4975]: I0126 00:07:02.204662 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:07:02 crc kubenswrapper[4975]: I0126 00:07:02.204728 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:07:02 crc kubenswrapper[4975]: I0126 00:07:02.205917 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:02 crc kubenswrapper[4975]: I0126 00:07:02.205952 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:02 crc kubenswrapper[4975]: I0126 00:07:02.205966 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" 
Jan 26 00:07:02 crc kubenswrapper[4975]: I0126 00:07:02.206295 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:02 crc kubenswrapper[4975]: I0126 00:07:02.206333 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:02 crc kubenswrapper[4975]: I0126 00:07:02.206343 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:03 crc kubenswrapper[4975]: I0126 00:07:03.071900 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Jan 26 00:07:03 crc kubenswrapper[4975]: I0126 00:07:03.103121 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 21:10:28.424034096 +0000 UTC Jan 26 00:07:03 crc kubenswrapper[4975]: I0126 00:07:03.206802 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:07:03 crc kubenswrapper[4975]: I0126 00:07:03.206986 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:07:03 crc kubenswrapper[4975]: I0126 00:07:03.208061 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:03 crc kubenswrapper[4975]: I0126 00:07:03.208126 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:03 crc kubenswrapper[4975]: I0126 00:07:03.208144 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:03 crc kubenswrapper[4975]: I0126 00:07:03.208714 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:03 crc kubenswrapper[4975]: I0126 00:07:03.208826 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:03 crc kubenswrapper[4975]: I0126 00:07:03.208852 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:04 crc kubenswrapper[4975]: I0126 00:07:04.103900 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 18:58:02.595561691 +0000 UTC Jan 26 00:07:04 crc kubenswrapper[4975]: I0126 00:07:04.517063 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 00:07:04 crc kubenswrapper[4975]: I0126 00:07:04.517211 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:07:04 crc kubenswrapper[4975]: I0126 00:07:04.519502 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:04 crc kubenswrapper[4975]: I0126 00:07:04.519557 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:04 crc kubenswrapper[4975]: I0126 00:07:04.519591 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:04 crc kubenswrapper[4975]: I0126 00:07:04.525019 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 00:07:05 crc kubenswrapper[4975]: I0126 00:07:05.104419 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 20:21:04.162904712 +0000 UTC Jan 26 00:07:05 crc kubenswrapper[4975]: I0126 00:07:05.212647 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:07:05 crc kubenswrapper[4975]: I0126 00:07:05.214061 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:05 crc kubenswrapper[4975]: I0126 00:07:05.214099 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:05 crc kubenswrapper[4975]: I0126 00:07:05.214114 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:06 crc kubenswrapper[4975]: I0126 00:07:06.106366 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 13:46:45.972053762 +0000 UTC Jan 26 00:07:06 crc kubenswrapper[4975]: E0126 00:07:06.209567 4975 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 26 00:07:06 crc kubenswrapper[4975]: I0126 00:07:06.584593 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Jan 26 00:07:06 crc kubenswrapper[4975]: I0126 00:07:06.584804 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:07:06 crc kubenswrapper[4975]: I0126 00:07:06.585949 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:06 crc kubenswrapper[4975]: I0126 00:07:06.586225 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:06 crc kubenswrapper[4975]: I0126 00:07:06.586420 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:06 crc kubenswrapper[4975]: I0126 00:07:06.938905 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 00:07:06 crc kubenswrapper[4975]: I0126 00:07:06.939285 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:07:06 crc kubenswrapper[4975]: I0126 00:07:06.940475 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:06 crc kubenswrapper[4975]: I0126 00:07:06.940578 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:06 crc kubenswrapper[4975]: I0126 00:07:06.940657 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:06 crc kubenswrapper[4975]: I0126 00:07:06.943693 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 00:07:07 crc kubenswrapper[4975]: I0126 00:07:07.106713 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 
05:53:03 +0000 UTC, rotation deadline is 2025-12-24 08:05:17.835700508 +0000 UTC Jan 26 00:07:07 crc kubenswrapper[4975]: I0126 00:07:07.216302 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:07:07 crc kubenswrapper[4975]: I0126 00:07:07.217249 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:07 crc kubenswrapper[4975]: I0126 00:07:07.217281 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:07 crc kubenswrapper[4975]: I0126 00:07:07.217291 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:07 crc kubenswrapper[4975]: E0126 00:07:07.712552 4975 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Jan 26 00:07:07 crc kubenswrapper[4975]: E0126 00:07:07.725836 4975 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": net/http: TLS handshake timeout" event="&Event{ObjectMeta:{crc.188e1f34ffe6955d default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-26 00:06:56.095245661 +0000 UTC m=+0.216451175,LastTimestamp:2026-01-26 00:06:56.095245661 +0000 UTC m=+0.216451175,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 26 00:07:08 crc kubenswrapper[4975]: I0126 00:07:08.096976 4975 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Jan 26 00:07:08 crc kubenswrapper[4975]: I0126 00:07:08.107446 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 00:13:15.101631673 +0000 UTC Jan 26 00:07:08 crc kubenswrapper[4975]: E0126 00:07:08.186159 4975 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": net/http: TLS handshake timeout" logger="UnhandledError" Jan 26 00:07:08 crc kubenswrapper[4975]: W0126 00:07:08.639858 4975 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Jan 26 00:07:08 crc kubenswrapper[4975]: I0126 00:07:08.639953 4975 trace.go:236] Trace[653667168]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (26-Jan-2026 00:06:58.638) (total time: 10001ms): Jan 26 00:07:08 crc kubenswrapper[4975]: Trace[653667168]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 
10001ms (00:07:08.639) Jan 26 00:07:08 crc kubenswrapper[4975]: Trace[653667168]: [10.001115206s] [10.001115206s] END Jan 26 00:07:08 crc kubenswrapper[4975]: E0126 00:07:08.639979 4975 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Jan 26 00:07:08 crc kubenswrapper[4975]: I0126 00:07:08.838504 4975 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Jan 26 00:07:08 crc kubenswrapper[4975]: I0126 00:07:08.838772 4975 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Jan 26 00:07:08 crc kubenswrapper[4975]: I0126 00:07:08.858553 4975 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Jan 26 00:07:08 crc kubenswrapper[4975]: I0126 00:07:08.858836 4975 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Jan 26 00:07:09 crc kubenswrapper[4975]: I0126 00:07:09.108154 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 05:05:13.992920012 +0000 UTC Jan 26 00:07:09 crc kubenswrapper[4975]: I0126 00:07:09.313344 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:07:09 crc kubenswrapper[4975]: I0126 00:07:09.314894 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:09 crc kubenswrapper[4975]: I0126 00:07:09.314958 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:09 crc kubenswrapper[4975]: I0126 00:07:09.314977 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:09 crc kubenswrapper[4975]: I0126 00:07:09.315012 4975 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 26 00:07:10 crc kubenswrapper[4975]: I0126 00:07:10.108853 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 14:40:30.188120329 +0000 UTC Jan 26 00:07:11 crc kubenswrapper[4975]: I0126 00:07:11.109356 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation 
deadline is 2025-12-01 19:05:25.000902485 +0000 UTC Jan 26 00:07:11 crc kubenswrapper[4975]: I0126 00:07:11.589496 4975 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 26 00:07:11 crc kubenswrapper[4975]: I0126 00:07:11.589607 4975 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 26 00:07:12 crc kubenswrapper[4975]: I0126 00:07:12.109767 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 20:23:07.912842657 +0000 UTC Jan 26 00:07:12 crc kubenswrapper[4975]: I0126 00:07:12.113084 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:07:12 crc kubenswrapper[4975]: I0126 00:07:12.113419 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:07:12 crc kubenswrapper[4975]: I0126 00:07:12.115084 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:12 crc kubenswrapper[4975]: I0126 00:07:12.115136 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:12 crc kubenswrapper[4975]: I0126 00:07:12.115158 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:12 crc kubenswrapper[4975]: I0126 00:07:12.120310 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:07:12 crc kubenswrapper[4975]: I0126 00:07:12.188718 4975 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 26 00:07:12 crc kubenswrapper[4975]: I0126 00:07:12.209876 4975 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 26 00:07:12 crc kubenswrapper[4975]: I0126 00:07:12.228526 4975 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 26 00:07:12 crc kubenswrapper[4975]: I0126 00:07:12.228608 4975 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 26 00:07:12 crc kubenswrapper[4975]: I0126 00:07:12.230283 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:12 crc kubenswrapper[4975]: I0126 00:07:12.230339 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:12 crc kubenswrapper[4975]: I0126 00:07:12.230358 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:13 crc kubenswrapper[4975]: I0126 00:07:13.110033 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 
2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 02:44:44.74430448 +0000 UTC Jan 26 00:07:13 crc kubenswrapper[4975]: I0126 00:07:13.200286 4975 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 26 00:07:13 crc kubenswrapper[4975]: E0126 00:07:13.854281 4975 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s" Jan 26 00:07:13 crc kubenswrapper[4975]: I0126 00:07:13.856680 4975 trace.go:236] Trace[244881270]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (26-Jan-2026 00:07:00.085) (total time: 13771ms): Jan 26 00:07:13 crc kubenswrapper[4975]: Trace[244881270]: ---"Objects listed" error: 13771ms (00:07:13.856) Jan 26 00:07:13 crc kubenswrapper[4975]: Trace[244881270]: [13.771276602s] [13.771276602s] END Jan 26 00:07:13 crc kubenswrapper[4975]: I0126 00:07:13.856711 4975 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 26 00:07:13 crc kubenswrapper[4975]: I0126 00:07:13.857478 4975 trace.go:236] Trace[1034956658]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (26-Jan-2026 00:07:00.065) (total time: 13792ms): Jan 26 00:07:13 crc kubenswrapper[4975]: Trace[1034956658]: ---"Objects listed" error: 13792ms (00:07:13.857) Jan 26 00:07:13 crc kubenswrapper[4975]: Trace[1034956658]: [13.792158393s] [13.792158393s] END Jan 26 00:07:13 crc kubenswrapper[4975]: I0126 00:07:13.857632 4975 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 26 00:07:13 crc kubenswrapper[4975]: I0126 00:07:13.858902 4975 trace.go:236] Trace[150054349]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (26-Jan-2026 00:06:59.915) (total time: 13943ms): Jan 26 00:07:13 crc kubenswrapper[4975]: Trace[150054349]: ---"Objects listed" error: 13943ms (00:07:13.858) Jan 26 00:07:13 crc kubenswrapper[4975]: Trace[150054349]: [13.943676944s] [13.943676944s] END Jan 26 00:07:13 crc kubenswrapper[4975]: I0126 00:07:13.858944 4975 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 26 00:07:13 crc kubenswrapper[4975]: I0126 00:07:13.859393 4975 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.012045 4975 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:45566->192.168.126.11:17697: read: connection reset by peer" start-of-body= Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.012118 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:45566->192.168.126.11:17697: read: connection reset by peer" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.012252 4975 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get 
\"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:45580->192.168.126.11:17697: read: connection reset by peer" start-of-body= Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.012275 4975 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:45580->192.168.126.11:17697: read: connection reset by peer" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.012555 4975 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.012582 4975 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.098528 4975 apiserver.go:52] "Watching apiserver" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.102369 4975 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.102806 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"] Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.103293 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.103655 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.103943 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.104138 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.104176 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.104185 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.104220 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.104980 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.105051 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.106069 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.106283 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.106443 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.106466 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.107966 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.108248 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.108664 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.110577 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 00:11:23.95481432 +0000 UTC Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.115280 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.115691 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.137402 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.155504 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.173312 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.184969 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.197757 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.200225 4975 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.211279 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.223516 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.234160 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.236150 4975 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66" exitCode=255 Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.236189 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66"} Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.246138 4975 scope.go:117] "RemoveContainer" containerID="80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.247394 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.248781 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.262975 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.263029 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.263082 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.263114 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.263142 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.263173 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.263205 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.263232 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.263261 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.263291 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.263456 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.263720 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.263784 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.263819 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.263847 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.263878 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.263913 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.263947 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.263975 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.263973 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.264006 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.264039 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.264066 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.264096 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.264122 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.264149 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.264179 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 
00:07:14.264212 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.264282 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.264335 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.264662 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.264707 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265063 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265103 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265131 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265164 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265199 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 26 
00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265365 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265394 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265428 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265458 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265488 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265521 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265554 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265585 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265614 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265645 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") 
pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265691 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265721 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265767 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265802 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265835 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265869 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265896 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265962 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265993 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266024 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: 
\"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266054 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266089 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266117 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266146 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266176 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266208 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266239 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266272 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266306 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266334 4975 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266364 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.263564 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266392 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266464 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266502 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266532 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266561 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266595 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266626 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 
00:07:14.266651 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266678 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266701 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266749 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266789 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266817 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266842 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266868 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266892 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266915 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 
00:07:14.266955 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.267030 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.267054 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.267079 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.267107 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.267223 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.267252 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.267281 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.267311 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.267343 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod 
\"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.267371 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.267404 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.267437 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.267466 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.267493 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.267531 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.267559 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.267585 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.267613 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.267640 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: 
\"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.263995 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.264193 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.268463 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.264321 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.264362 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265042 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265178 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.265589 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266360 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266602 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266609 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.266983 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.267376 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.268009 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.268041 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.268319 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.268487 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.268769 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.268932 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.269119 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.269246 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.269418 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.269621 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.269945 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.269982 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.270469 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.270544 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271002 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271013 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271129 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271175 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271254 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271283 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271310 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271334 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271356 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271376 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271395 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271415 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271436 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271455 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271479 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271497 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271518 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271535 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271554 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271570 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 26 00:07:14 crc kubenswrapper[4975]: 
I0126 00:07:14.271587 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271603 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271622 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271639 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271657 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271676 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271693 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271711 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271751 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271775 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 26 00:07:14 crc 
kubenswrapper[4975]: I0126 00:07:14.271798 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271824 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271847 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271866 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271892 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271920 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271944 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271967 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271992 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272017 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" 
(UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272043 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272068 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272122 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272151 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272213 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272244 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272304 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272334 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272363 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272388 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod 
\"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272413 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272490 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272527 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272555 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272586 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272614 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272642 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272671 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272691 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272710 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: 
\"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272745 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272770 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271128 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271300 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.271700 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272197 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272470 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.273720 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.274332 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.274813 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.275162 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.275231 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.275604 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.275646 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.275824 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.275874 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). 
InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.276402 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.276719 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.276864 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.276862 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.277028 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.277189 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.272794 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.277582 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.277337 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.278201 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.278126 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.278385 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.278585 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.278599 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.278640 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.278659 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.278925 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.278988 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.279032 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.279062 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.279095 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.279286 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.279299 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.279469 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.279545 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.279629 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.279709 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.279787 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.279856 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.280090 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.280138 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.280214 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: 
\"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.280356 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.281785 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.281844 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.281875 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.281937 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.281966 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.287813 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.289238 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.289314 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.290196 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.290450 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.290547 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.290588 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.290618 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.290650 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.290791 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.290849 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.290899 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.290951 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.290996 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.291056 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.291357 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.291427 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.291470 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.291505 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.279311 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.279522 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.279607 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.279995 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.280001 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.279646 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.280668 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.280889 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.281247 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.281352 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.281661 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.281626 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.281764 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.281863 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.282265 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.282298 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.282447 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.282484 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.282822 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.282843 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.283282 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.295642 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.283544 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.283722 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.283785 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.283878 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.284356 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.284403 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.284601 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.284768 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.284813 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.284861 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.285169 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.285348 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.285227 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.285677 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.286526 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.287265 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.287290 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.287540 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.287812 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.288098 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.288332 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.288352 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.288380 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.288390 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.288690 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.288721 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.288758 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.289250 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.289388 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.289381 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.288787 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.289535 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.289556 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.289655 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.289660 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.289684 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.289679 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.289683 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.289696 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). 
InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.289774 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.289679 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.290027 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.290286 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.290301 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.290397 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.290429 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.290622 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.290684 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.290914 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.291272 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.291470 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.291514 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.291676 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.291877 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.294240 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.287691 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.294721 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.294813 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.295162 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.295029 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.295157 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.295485 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.295899 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.296408 4975 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.291546 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.296607 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.297212 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.297345 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:14.797305702 +0000 UTC m=+18.918511196 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.297480 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:07:14.797469677 +0000 UTC m=+18.918675171 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.297556 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.297693 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.298004 4975 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.298139 4975 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299130 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299168 4975 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299185 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299198 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299209 4975 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299221 4975 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299233 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299244 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299255 4975 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299266 4975 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299277 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299288 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299297 4975 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299308 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299319 4975 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299329 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299339 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299351 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299360 4975 reconciler_common.go:293] "Volume detached 
for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299370 4975 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299381 4975 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299392 4975 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299402 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299411 4975 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299422 4975 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299433 4975 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299443 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299452 4975 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299460 4975 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299472 4975 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299481 4975 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299490 4975 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299499 4975 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299510 4975 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299520 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299529 4975 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299538 4975 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299549 4975 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299558 4975 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299567 4975 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299577 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299586 4975 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299594 4975 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299604 4975 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299616 4975 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299625 4975 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299634 4975 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299643 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299652 4975 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299662 4975 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299690 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299704 4975 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299713 4975 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299722 4975 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299747 4975 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299758 4975 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299767 4975 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299778 4975 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299788 4975 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299800 4975 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299812 4975 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299822 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299832 4975 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299841 4975 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299850 4975 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299860 4975 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299869 4975 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299877 4975 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299885 4975 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299895 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299904 4975 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299912 4975 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299921 4975 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299933 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299942 4975 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299954 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299962 4975 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299973 4975 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299982 4975 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.299992 4975 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.300002 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.300011 4975 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.300022 4975 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.300034 4975 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: 
\"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.300043 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.300053 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.300062 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.300071 4975 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.300080 4975 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.300091 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.300101 4975 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.300109 4975 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.300217 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:14.800206219 +0000 UTC m=+18.921411713 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.301092 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.309441 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.309461 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.309473 4975 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.309525 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:14.809512168 +0000 UTC m=+18.930717662 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.312625 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.313368 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.314186 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.314777 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.315119 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.316206 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.316329 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.318335 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.316479 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.319389 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.321478 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.326370 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.330360 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.329704 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.334094 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.334148 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.334167 4975 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.334237 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.334258 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:14.834226088 +0000 UTC m=+18.955431792 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.336588 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.336986 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.337304 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.339418 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.340936 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.341066 4975 kubelet_node_status.go:115] "Node was previously registered" node="crc" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.341130 4975 kubelet_node_status.go:79] "Successfully registered node" node="crc" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.341328 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.345011 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.345344 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.345414 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.346958 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.347051 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.347641 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.348483 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.349037 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.349084 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.349099 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.349123 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.349136 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:14Z","lastTransitionTime":"2026-01-26T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.361596 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.361965 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.362466 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.363159 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.363824 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.364245 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.364314 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.364862 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.364880 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.364904 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). 
InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.365108 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.365303 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.365285 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.365380 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.366774 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.366881 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.366954 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.367103 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.367503 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.371027 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.372861 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.372925 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.373130 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.374491 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.375048 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.377192 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.377594 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.377725 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.382085 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401006 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 26 00:07:14 crc 
kubenswrapper[4975]: I0126 00:07:14.401054 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401093 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401104 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401113 4975 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401121 4975 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401129 4975 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401137 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401148 4975 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401156 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401165 4975 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401173 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401181 4975 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401189 4975 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401197 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401205 4975 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401213 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401223 4975 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401231 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401239 4975 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401247 4975 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401255 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401263 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401271 4975 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401282 4975 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401290 4975 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401299 4975 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401307 4975 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401316 4975 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401324 4975 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401332 4975 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401340 4975 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401350 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401358 4975 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401366 4975 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401374 4975 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401384 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401393 4975 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401401 4975 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401409 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401417 4975 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401425 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401435 4975 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401443 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401450 4975 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401458 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401467 4975 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401474 4975 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401481 4975 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401490 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401497 4975 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401505 4975 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401513 4975 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" 
(UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401522 4975 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401530 4975 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401538 4975 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401561 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401577 4975 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401590 4975 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401599 4975 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401607 4975 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401616 4975 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401625 4975 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401633 4975 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401641 4975 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401650 4975 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401658 4975 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401667 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401676 4975 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401684 4975 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401693 4975 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401702 4975 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401711 4975 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401719 4975 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401727 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401771 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401780 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401788 4975 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401797 4975 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401807 4975 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401815 4975 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401824 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401834 4975 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401844 4975 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401855 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401869 4975 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401877 4975 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401886 4975 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401894 4975 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401902 4975 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401910 4975 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401918 4975 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on 
node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401927 4975 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401935 4975 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401944 4975 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401954 4975 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401962 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401970 4975 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401979 4975 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401987 4975 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.401996 4975 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.402004 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.402014 4975 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.402058 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.402196 4975 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.409091 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.409205 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.409289 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.409503 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.409630 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:14Z","lastTransitionTime":"2026-01-26T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.417553 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.420032 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.429397 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.432127 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.434743 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.434823 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.434890 4975 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.434949 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.435007 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:14Z","lastTransitionTime":"2026-01-26T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:14 crc kubenswrapper[4975]: W0126 00:07:14.442643 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-14bd6f550d3d1d66a4b784606cb1951738c087a943afbd6b184daa0c12deea4e WatchSource:0}: Error finding container 14bd6f550d3d1d66a4b784606cb1951738c087a943afbd6b184daa0c12deea4e: Status 404 returned error can't find the container with id 14bd6f550d3d1d66a4b784606cb1951738c087a943afbd6b184daa0c12deea4e Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.442723 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.448065 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.449496 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.453679 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.453873 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.453884 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.453903 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.453914 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:14Z","lastTransitionTime":"2026-01-26T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.465193 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.468865 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.468917 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.468932 4975 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.468950 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.468964 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:14Z","lastTransitionTime":"2026-01-26T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.478823 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.479367 4975 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.488435 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.489273 4975 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.489463 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.489635 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.489821 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:14Z","lastTransitionTime":"2026-01-26T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.502926 4975 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.502962 4975 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.592049 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.592109 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.592121 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.592144 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.592161 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:14Z","lastTransitionTime":"2026-01-26T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.695084 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.695128 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.695140 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.695159 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.695172 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:14Z","lastTransitionTime":"2026-01-26T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.797991 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.798026 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.798037 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.798052 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.798061 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:14Z","lastTransitionTime":"2026-01-26T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.804577 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.804639 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.804669 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.804759 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:07:15.804721822 +0000 UTC m=+19.925927316 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.804777 4975 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.804822 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:15.804809515 +0000 UTC m=+19.926015009 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.804850 4975 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.804887 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:15.804879367 +0000 UTC m=+19.926084861 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.900523 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.900562 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.900574 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.900591 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.900602 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:14Z","lastTransitionTime":"2026-01-26T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.904955 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:14 crc kubenswrapper[4975]: I0126 00:07:14.904999 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.905115 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.905137 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.905142 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.905152 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.905156 4975 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.905163 4975 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.905202 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:15.90518822 +0000 UTC m=+20.026393714 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:14 crc kubenswrapper[4975]: E0126 00:07:14.905215 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:15.905210101 +0000 UTC m=+20.026415595 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.002542 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.002585 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.002596 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.002612 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.002625 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:15Z","lastTransitionTime":"2026-01-26T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.108519 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.108563 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.108577 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.108595 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.108609 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:15Z","lastTransitionTime":"2026-01-26T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.111711 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 11:49:20.630708939 +0000 UTC Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.198424 4975 csr.go:261] certificate signing request csr-kvwfk is approved, waiting to be issued Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.211412 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.211452 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.211462 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.211476 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.211485 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:15Z","lastTransitionTime":"2026-01-26T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.244191 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c"} Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.244234 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4"} Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.244250 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"9b710635bad28233ffcd46b5acf56592f2a0332cf6738209315a1b2804b4aa51"} Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.245290 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"74b578cb250bb6fc1bd29a0cc2d48bbf3bc9706800d54fc1868a26df1e7d904d"} Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.246298 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180"} Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.246325 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" 
event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"14bd6f550d3d1d66a4b784606cb1951738c087a943afbd6b184daa0c12deea4e"} Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.247908 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.249833 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5"} Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.250374 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.266088 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.290002 4975 csr.go:257] certificate signing request csr-kvwfk is issued Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.298493 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.314146 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.314182 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.314191 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.314205 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.314224 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:15Z","lastTransitionTime":"2026-01-26T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.326013 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.330983 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-ckf4p"] Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.331327 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-ckf4p" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.333459 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.333582 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.333468 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.333725 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.353320 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.367797 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.381340 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.402027 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.417181 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.417219 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.417229 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.417251 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.417263 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:15Z","lastTransitionTime":"2026-01-26T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.417593 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.427885 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7-serviceca\") pod \"node-ca-ckf4p\" (UID: \"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\") " pod="openshift-image-registry/node-ca-ckf4p" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.427956 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7-host\") pod \"node-ca-ckf4p\" (UID: \"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\") " pod="openshift-image-registry/node-ca-ckf4p" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.427990 4975 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5xfk\" (UniqueName: \"kubernetes.io/projected/66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7-kube-api-access-m5xfk\") pod \"node-ca-ckf4p\" (UID: \"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\") " pod="openshift-image-registry/node-ca-ckf4p" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.434913 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.456853 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.519999 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.520060 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.520073 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.520096 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.520109 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:15Z","lastTransitionTime":"2026-01-26T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.522003 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.531210 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7-serviceca\") pod \"node-ca-ckf4p\" (UID: \"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\") " pod="openshift-image-registry/node-ca-ckf4p" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.531262 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7-host\") pod \"node-ca-ckf4p\" (UID: \"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\") " pod="openshift-image-registry/node-ca-ckf4p" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.531284 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5xfk\" (UniqueName: 
\"kubernetes.io/projected/66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7-kube-api-access-m5xfk\") pod \"node-ca-ckf4p\" (UID: \"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\") " pod="openshift-image-registry/node-ca-ckf4p" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.531377 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7-host\") pod \"node-ca-ckf4p\" (UID: \"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\") " pod="openshift-image-registry/node-ca-ckf4p" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.532698 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7-serviceca\") pod \"node-ca-ckf4p\" (UID: \"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\") " pod="openshift-image-registry/node-ca-ckf4p" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.566843 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5xfk\" (UniqueName: \"kubernetes.io/projected/66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7-kube-api-access-m5xfk\") pod \"node-ca-ckf4p\" (UID: \"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\") " pod="openshift-image-registry/node-ca-ckf4p" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.581093 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.612032 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.621874 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.621917 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.621931 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.621949 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.621959 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:15Z","lastTransitionTime":"2026-01-26T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.624153 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.641805 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-ckf4p" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.642063 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:15 crc kubenswrapper[4975]: W0126 00:07:15.656827 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod66a9d0dc_feb7_4b88_a6a6_d0ceb9bfb0f7.slice/crio-b9d188d52c7da2c6c12a9be00d78544bf4758f589bf51b45ae6af2fb4b584166 WatchSource:0}: Error finding container b9d188d52c7da2c6c12a9be00d78544bf4758f589bf51b45ae6af2fb4b584166: Status 404 returned error can't find the container with id b9d188d52c7da2c6c12a9be00d78544bf4758f589bf51b45ae6af2fb4b584166 Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.710127 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-vcvtm"] Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.710460 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-vcvtm" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.713108 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.713395 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.713551 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.726132 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.726154 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.726164 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.726179 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.726191 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:15Z","lastTransitionTime":"2026-01-26T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.727997 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.733173 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/1e64c045-dfa3-4706-8600-03600ca4980c-hosts-file\") pod \"node-resolver-vcvtm\" (UID: \"1e64c045-dfa3-4706-8600-03600ca4980c\") " pod="openshift-dns/node-resolver-vcvtm" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.733226 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dt2sz\" (UniqueName: \"kubernetes.io/projected/1e64c045-dfa3-4706-8600-03600ca4980c-kube-api-access-dt2sz\") pod \"node-resolver-vcvtm\" (UID: \"1e64c045-dfa3-4706-8600-03600ca4980c\") " pod="openshift-dns/node-resolver-vcvtm" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.744015 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.762749 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.776472 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.789367 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.801494 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.813432 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.828507 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.828544 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.828554 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.828571 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.828581 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:15Z","lastTransitionTime":"2026-01-26T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.828806 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.835526 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.835620 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/1e64c045-dfa3-4706-8600-03600ca4980c-hosts-file\") pod \"node-resolver-vcvtm\" (UID: \"1e64c045-dfa3-4706-8600-03600ca4980c\") " pod="openshift-dns/node-resolver-vcvtm" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.835644 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dt2sz\" (UniqueName: \"kubernetes.io/projected/1e64c045-dfa3-4706-8600-03600ca4980c-kube-api-access-dt2sz\") pod \"node-resolver-vcvtm\" (UID: \"1e64c045-dfa3-4706-8600-03600ca4980c\") " pod="openshift-dns/node-resolver-vcvtm" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.835662 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.835682 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.835788 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/1e64c045-dfa3-4706-8600-03600ca4980c-hosts-file\") pod \"node-resolver-vcvtm\" (UID: \"1e64c045-dfa3-4706-8600-03600ca4980c\") " pod="openshift-dns/node-resolver-vcvtm" Jan 26 00:07:15 crc kubenswrapper[4975]: E0126 00:07:15.835707 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:07:17.835679035 +0000 UTC m=+21.956884529 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:07:15 crc kubenswrapper[4975]: E0126 00:07:15.835818 4975 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 00:07:15 crc kubenswrapper[4975]: E0126 00:07:15.835821 4975 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 00:07:15 crc kubenswrapper[4975]: E0126 00:07:15.835968 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:17.835956103 +0000 UTC m=+21.957161687 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 00:07:15 crc kubenswrapper[4975]: E0126 00:07:15.835990 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:17.835982974 +0000 UTC m=+21.957188588 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.843101 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.859464 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dt2sz\" (UniqueName: \"kubernetes.io/projected/1e64c045-dfa3-4706-8600-03600ca4980c-kube-api-access-dt2sz\") pod \"node-resolver-vcvtm\" (UID: \"1e64c045-dfa3-4706-8600-03600ca4980c\") " pod="openshift-dns/node-resolver-vcvtm" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.930949 4975 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.930989 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.930998 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.931016 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.931026 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:15Z","lastTransitionTime":"2026-01-26T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.936613 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:15 crc kubenswrapper[4975]: I0126 00:07:15.936657 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:15 crc kubenswrapper[4975]: E0126 00:07:15.936814 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 00:07:15 crc kubenswrapper[4975]: E0126 00:07:15.936837 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 00:07:15 crc kubenswrapper[4975]: E0126 00:07:15.936849 4975 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:15 crc kubenswrapper[4975]: E0126 00:07:15.936897 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:17.936880244 +0000 UTC m=+22.058085738 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:15 crc kubenswrapper[4975]: E0126 00:07:15.936950 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 00:07:15 crc kubenswrapper[4975]: E0126 00:07:15.936960 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 00:07:15 crc kubenswrapper[4975]: E0126 00:07:15.936967 4975 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:15 crc kubenswrapper[4975]: E0126 00:07:15.936988 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:17.936982118 +0000 UTC m=+22.058187612 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.020782 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-vcvtm" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.028379 4975 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Jan 26 00:07:16 crc kubenswrapper[4975]: W0126 00:07:16.028578 4975 reflector.go:484] object-"openshift-image-registry"/"node-ca-dockercfg-4777p": watch of *v1.Secret ended with: very short watch: object-"openshift-image-registry"/"node-ca-dockercfg-4777p": Unexpected watch close - watch lasted less than a second and no items received Jan 26 00:07:16 crc kubenswrapper[4975]: W0126 00:07:16.028617 4975 reflector.go:484] object-"openshift-image-registry"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-image-registry"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 26 00:07:16 crc kubenswrapper[4975]: W0126 00:07:16.028641 4975 reflector.go:484] object-"openshift-dns"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-dns"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 26 00:07:16 crc kubenswrapper[4975]: W0126 00:07:16.029490 4975 reflector.go:484] object-"openshift-image-registry"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-image-registry"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 26 00:07:16 crc kubenswrapper[4975]: W0126 00:07:16.030225 4975 reflector.go:484] object-"openshift-image-registry"/"image-registry-certificates": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-image-registry"/"image-registry-certificates": Unexpected watch close - watch lasted less than a second and no items received Jan 26 00:07:16 crc kubenswrapper[4975]: W0126 00:07:16.030559 4975 reflector.go:484] object-"openshift-dns"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-dns"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 26 00:07:16 crc kubenswrapper[4975]: W0126 00:07:16.030595 4975 reflector.go:484] object-"openshift-dns"/"node-resolver-dockercfg-kz9s7": watch of *v1.Secret ended with: very short watch: object-"openshift-dns"/"node-resolver-dockercfg-kz9s7": Unexpected watch close - watch lasted less than a second and no items received Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.041364 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.041396 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.041407 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.041427 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.041439 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:16Z","lastTransitionTime":"2026-01-26T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not 
ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.112301 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 12:52:30.441152095 +0000 UTC Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.148807 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:16 crc kubenswrapper[4975]: E0126 00:07:16.149091 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.149122 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:16 crc kubenswrapper[4975]: E0126 00:07:16.149295 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.148881 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:16 crc kubenswrapper[4975]: E0126 00:07:16.149471 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.151121 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.151151 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.151159 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.151173 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.151186 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:16Z","lastTransitionTime":"2026-01-26T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.155623 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.156296 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.158393 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.159215 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.160310 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.160900 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.161519 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.162578 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.165204 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" 
Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.167038 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.167642 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.169523 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.170131 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.171516 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.172590 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.173106 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.174053 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.174438 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.175006 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.178108 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.183499 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.184335 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.186294 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.186835 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.188075 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.188487 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.189115 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.190444 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.190763 4975 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.190911 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.191826 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.192319 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.193135 4975 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" 
path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.193236 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.195082 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.196136 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.196685 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.198418 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.199202 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.200468 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.201219 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.202263 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.202870 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.203858 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.204551 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.206042 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.206648 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" 
path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.210563 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.211378 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.212650 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.213475 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.214565 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.215172 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.215783 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.219308 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.229162 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.232639 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.233148 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-f42fk"] Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 
00:07:16.233478 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-bcsb4"] Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.233635 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-jpmlj"] Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.234160 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2vrv2"] Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.235081 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.235378 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.235649 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.235967 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.240721 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.241020 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.241078 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.241050 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.241332 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.241538 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.241701 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.241815 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.241819 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.241953 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.242235 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.242420 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.242530 4975 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-multus"/"multus-daemon-config" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.242663 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.242817 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.245125 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.245429 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.245615 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.245755 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.245877 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod 
\"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.258220 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.258248 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.258256 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.258268 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.258278 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:16Z","lastTransitionTime":"2026-01-26T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.258459 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-vcvtm" event={"ID":"1e64c045-dfa3-4706-8600-03600ca4980c","Type":"ContainerStarted","Data":"5b0cb1d4a89f0f9755a648e4d95d149202afe89b3f5d955c53092f2fd3523162"} Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.261880 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-ckf4p" event={"ID":"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7","Type":"ContainerStarted","Data":"547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369"} Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.261918 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-ckf4p" event={"ID":"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7","Type":"ContainerStarted","Data":"b9d188d52c7da2c6c12a9be00d78544bf4758f589bf51b45ae6af2fb4b584166"} Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.270322 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.290808 4975 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-01-26 00:02:15 +0000 UTC, rotation deadline is 2026-10-28 08:14:04.401803709 +0000 UTC Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.290871 4975 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6608h6m48.110934878s for next certificate rotation Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.336021 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.342565 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/b76c31fb-14ea-4b49-8a41-0b2731967b86-rootfs\") pod \"machine-config-daemon-f42fk\" (UID: \"b76c31fb-14ea-4b49-8a41-0b2731967b86\") " pod="openshift-machine-config-operator/machine-config-daemon-f42fk" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.342604 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-host-var-lib-cni-multus\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.342622 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/45c4e01d-bac9-49dd-9be1-bd759f38f2a8-os-release\") pod \"multus-additional-cni-plugins-jpmlj\" (UID: \"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\") " pod="openshift-multus/multus-additional-cni-plugins-jpmlj" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.342640 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: 
\"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-log-socket\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.342657 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-host-run-netns\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.342713 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3fd68329-6540-4965-a036-ddd1045f1190-ovnkube-script-lib\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.342742 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmrkv\" (UniqueName: \"kubernetes.io/projected/3fd68329-6540-4965-a036-ddd1045f1190-kube-api-access-rmrkv\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.342764 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-host-var-lib-kubelet\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.342786 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3fd68329-6540-4965-a036-ddd1045f1190-env-overrides\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.342800 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9dbd\" (UniqueName: \"kubernetes.io/projected/7d3cba21-428c-4151-bb16-f3478d54c90e-kube-api-access-m9dbd\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.342822 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-run-openvswitch\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.342837 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3fd68329-6540-4965-a036-ddd1045f1190-ovnkube-config\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.342851 4975 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/45c4e01d-bac9-49dd-9be1-bd759f38f2a8-system-cni-dir\") pod \"multus-additional-cni-plugins-jpmlj\" (UID: \"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\") " pod="openshift-multus/multus-additional-cni-plugins-jpmlj" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.342872 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7d3cba21-428c-4151-bb16-f3478d54c90e-cni-binary-copy\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.342886 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-etc-kubernetes\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.342899 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-cni-bin\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.342915 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/7d3cba21-428c-4151-bb16-f3478d54c90e-multus-daemon-config\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.342931 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-host-run-multus-certs\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.342946 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wskkd\" (UniqueName: \"kubernetes.io/projected/45c4e01d-bac9-49dd-9be1-bd759f38f2a8-kube-api-access-wskkd\") pod \"multus-additional-cni-plugins-jpmlj\" (UID: \"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\") " pod="openshift-multus/multus-additional-cni-plugins-jpmlj" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.342969 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-etc-openvswitch\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.342987 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-run-systemd\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 
crc kubenswrapper[4975]: I0126 00:07:16.343003 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-cnibin\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343016 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-cni-netd\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343030 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-host-var-lib-cni-bin\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343046 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-var-lib-openvswitch\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343060 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b76c31fb-14ea-4b49-8a41-0b2731967b86-proxy-tls\") pod \"machine-config-daemon-f42fk\" (UID: \"b76c31fb-14ea-4b49-8a41-0b2731967b86\") " pod="openshift-machine-config-operator/machine-config-daemon-f42fk" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343074 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-host-run-k8s-cni-cncf-io\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343090 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-run-ovn-kubernetes\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343105 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343124 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b76c31fb-14ea-4b49-8a41-0b2731967b86-mcd-auth-proxy-config\") pod 
\"machine-config-daemon-f42fk\" (UID: \"b76c31fb-14ea-4b49-8a41-0b2731967b86\") " pod="openshift-machine-config-operator/machine-config-daemon-f42fk" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343139 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-multus-socket-dir-parent\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343153 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-node-log\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343172 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/45c4e01d-bac9-49dd-9be1-bd759f38f2a8-cnibin\") pod \"multus-additional-cni-plugins-jpmlj\" (UID: \"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\") " pod="openshift-multus/multus-additional-cni-plugins-jpmlj" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343193 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-multus-conf-dir\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343208 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/45c4e01d-bac9-49dd-9be1-bd759f38f2a8-tuning-conf-dir\") pod \"multus-additional-cni-plugins-jpmlj\" (UID: \"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\") " pod="openshift-multus/multus-additional-cni-plugins-jpmlj" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343223 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/45c4e01d-bac9-49dd-9be1-bd759f38f2a8-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-jpmlj\" (UID: \"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\") " pod="openshift-multus/multus-additional-cni-plugins-jpmlj" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343252 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-kubelet\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343276 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-run-ovn\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343290 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3fd68329-6540-4965-a036-ddd1045f1190-ovn-node-metrics-cert\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343305 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qhgz\" (UniqueName: \"kubernetes.io/projected/b76c31fb-14ea-4b49-8a41-0b2731967b86-kube-api-access-7qhgz\") pod \"machine-config-daemon-f42fk\" (UID: \"b76c31fb-14ea-4b49-8a41-0b2731967b86\") " pod="openshift-machine-config-operator/machine-config-daemon-f42fk" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343320 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-system-cni-dir\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343334 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-os-release\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343351 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/45c4e01d-bac9-49dd-9be1-bd759f38f2a8-cni-binary-copy\") pod \"multus-additional-cni-plugins-jpmlj\" (UID: \"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\") " pod="openshift-multus/multus-additional-cni-plugins-jpmlj" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343365 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-slash\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343380 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-run-netns\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343394 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-hostroot\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343414 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-systemd-units\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.343434 4975 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-multus-cni-dir\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.361975 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.362002 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.362010 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.362023 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.362031 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:16Z","lastTransitionTime":"2026-01-26T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.363370 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.382126 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.395306 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.406376 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.443978 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/45c4e01d-bac9-49dd-9be1-bd759f38f2a8-system-cni-dir\") pod \"multus-additional-cni-plugins-jpmlj\" (UID: \"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\") " pod="openshift-multus/multus-additional-cni-plugins-jpmlj" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444024 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7d3cba21-428c-4151-bb16-f3478d54c90e-cni-binary-copy\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444041 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-etc-kubernetes\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444058 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-cni-bin\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444075 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/7d3cba21-428c-4151-bb16-f3478d54c90e-multus-daemon-config\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444089 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-host-run-multus-certs\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444103 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wskkd\" (UniqueName: \"kubernetes.io/projected/45c4e01d-bac9-49dd-9be1-bd759f38f2a8-kube-api-access-wskkd\") pod \"multus-additional-cni-plugins-jpmlj\" (UID: \"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\") " pod="openshift-multus/multus-additional-cni-plugins-jpmlj" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444139 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-etc-openvswitch\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444157 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-run-systemd\") pod 
\"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444170 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-cnibin\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444185 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-cni-netd\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444199 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-host-var-lib-cni-bin\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444214 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-var-lib-openvswitch\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444229 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b76c31fb-14ea-4b49-8a41-0b2731967b86-proxy-tls\") pod \"machine-config-daemon-f42fk\" (UID: \"b76c31fb-14ea-4b49-8a41-0b2731967b86\") " pod="openshift-machine-config-operator/machine-config-daemon-f42fk" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444243 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-host-run-k8s-cni-cncf-io\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444258 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-run-ovn-kubernetes\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444273 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444289 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b76c31fb-14ea-4b49-8a41-0b2731967b86-mcd-auth-proxy-config\") pod \"machine-config-daemon-f42fk\" 
(UID: \"b76c31fb-14ea-4b49-8a41-0b2731967b86\") " pod="openshift-machine-config-operator/machine-config-daemon-f42fk" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444302 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-multus-socket-dir-parent\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444316 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-node-log\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444330 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/45c4e01d-bac9-49dd-9be1-bd759f38f2a8-cnibin\") pod \"multus-additional-cni-plugins-jpmlj\" (UID: \"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\") " pod="openshift-multus/multus-additional-cni-plugins-jpmlj" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444346 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-multus-conf-dir\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444360 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/45c4e01d-bac9-49dd-9be1-bd759f38f2a8-tuning-conf-dir\") pod \"multus-additional-cni-plugins-jpmlj\" (UID: \"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\") " pod="openshift-multus/multus-additional-cni-plugins-jpmlj" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444376 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/45c4e01d-bac9-49dd-9be1-bd759f38f2a8-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-jpmlj\" (UID: \"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\") " pod="openshift-multus/multus-additional-cni-plugins-jpmlj" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444391 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-kubelet\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444406 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-run-ovn\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444420 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3fd68329-6540-4965-a036-ddd1045f1190-ovn-node-metrics-cert\") pod \"ovnkube-node-2vrv2\" (UID: 
\"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444436 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qhgz\" (UniqueName: \"kubernetes.io/projected/b76c31fb-14ea-4b49-8a41-0b2731967b86-kube-api-access-7qhgz\") pod \"machine-config-daemon-f42fk\" (UID: \"b76c31fb-14ea-4b49-8a41-0b2731967b86\") " pod="openshift-machine-config-operator/machine-config-daemon-f42fk" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444453 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-system-cni-dir\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444467 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-os-release\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444483 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/45c4e01d-bac9-49dd-9be1-bd759f38f2a8-cni-binary-copy\") pod \"multus-additional-cni-plugins-jpmlj\" (UID: \"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\") " pod="openshift-multus/multus-additional-cni-plugins-jpmlj" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444497 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-slash\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444512 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-run-netns\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444526 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-hostroot\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444555 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-systemd-units\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444582 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-multus-cni-dir\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: 
I0126 00:07:16.444602 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/b76c31fb-14ea-4b49-8a41-0b2731967b86-rootfs\") pod \"machine-config-daemon-f42fk\" (UID: \"b76c31fb-14ea-4b49-8a41-0b2731967b86\") " pod="openshift-machine-config-operator/machine-config-daemon-f42fk" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444619 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-host-var-lib-cni-multus\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444634 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/45c4e01d-bac9-49dd-9be1-bd759f38f2a8-os-release\") pod \"multus-additional-cni-plugins-jpmlj\" (UID: \"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\") " pod="openshift-multus/multus-additional-cni-plugins-jpmlj" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444649 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-log-socket\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444662 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-host-run-netns\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444678 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3fd68329-6540-4965-a036-ddd1045f1190-ovnkube-script-lib\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444698 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmrkv\" (UniqueName: \"kubernetes.io/projected/3fd68329-6540-4965-a036-ddd1045f1190-kube-api-access-rmrkv\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444717 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-host-var-lib-kubelet\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444757 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3fd68329-6540-4965-a036-ddd1045f1190-env-overrides\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444781 4975 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-m9dbd\" (UniqueName: \"kubernetes.io/projected/7d3cba21-428c-4151-bb16-f3478d54c90e-kube-api-access-m9dbd\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444819 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-run-openvswitch\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.444847 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3fd68329-6540-4965-a036-ddd1045f1190-ovnkube-config\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.447496 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3fd68329-6540-4965-a036-ddd1045f1190-ovnkube-config\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.447579 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/45c4e01d-bac9-49dd-9be1-bd759f38f2a8-system-cni-dir\") pod \"multus-additional-cni-plugins-jpmlj\" (UID: \"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\") " pod="openshift-multus/multus-additional-cni-plugins-jpmlj" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.448345 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7d3cba21-428c-4151-bb16-f3478d54c90e-cni-binary-copy\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.448383 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-etc-kubernetes\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.448415 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-cni-bin\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.448831 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/7d3cba21-428c-4151-bb16-f3478d54c90e-multus-daemon-config\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.448866 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-host-run-multus-certs\") pod \"multus-bcsb4\" (UID: 
\"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.449088 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-etc-openvswitch\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.449120 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-run-systemd\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.449154 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-cnibin\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.449178 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-cni-netd\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.449202 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-host-var-lib-cni-bin\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.449224 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-var-lib-openvswitch\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.450989 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/45c4e01d-bac9-49dd-9be1-bd759f38f2a8-cni-binary-copy\") pod \"multus-additional-cni-plugins-jpmlj\" (UID: \"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\") " pod="openshift-multus/multus-additional-cni-plugins-jpmlj" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.451050 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-host-run-k8s-cni-cncf-io\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.451078 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-run-ovn-kubernetes\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.451101 4975 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.451581 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b76c31fb-14ea-4b49-8a41-0b2731967b86-mcd-auth-proxy-config\") pod \"machine-config-daemon-f42fk\" (UID: \"b76c31fb-14ea-4b49-8a41-0b2731967b86\") " pod="openshift-machine-config-operator/machine-config-daemon-f42fk" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.451631 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-multus-socket-dir-parent\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.451659 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-node-log\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.451690 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/45c4e01d-bac9-49dd-9be1-bd759f38f2a8-cnibin\") pod \"multus-additional-cni-plugins-jpmlj\" (UID: \"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\") " pod="openshift-multus/multus-additional-cni-plugins-jpmlj" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.451762 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-multus-conf-dir\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.452010 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-kubelet\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.452003 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-system-cni-dir\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.452319 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/45c4e01d-bac9-49dd-9be1-bd759f38f2a8-os-release\") pod \"multus-additional-cni-plugins-jpmlj\" (UID: \"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\") " pod="openshift-multus/multus-additional-cni-plugins-jpmlj" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.452362 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: 
\"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-slash\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.452385 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-run-netns\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.452406 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-hostroot\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.452427 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-systemd-units\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.452572 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-multus-cni-dir\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.452572 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/45c4e01d-bac9-49dd-9be1-bd759f38f2a8-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-jpmlj\" (UID: \"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\") " pod="openshift-multus/multus-additional-cni-plugins-jpmlj" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.452616 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/b76c31fb-14ea-4b49-8a41-0b2731967b86-rootfs\") pod \"machine-config-daemon-f42fk\" (UID: \"b76c31fb-14ea-4b49-8a41-0b2731967b86\") " pod="openshift-machine-config-operator/machine-config-daemon-f42fk" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.452644 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-host-var-lib-cni-multus\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.452667 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-host-run-netns\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.452692 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-log-socket\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" 
Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.452964 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-run-openvswitch\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.453021 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-host-var-lib-kubelet\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.453054 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/7d3cba21-428c-4151-bb16-f3478d54c90e-os-release\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.453077 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-run-ovn\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.453271 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3fd68329-6540-4965-a036-ddd1045f1190-env-overrides\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.453277 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3fd68329-6540-4965-a036-ddd1045f1190-ovnkube-script-lib\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.454837 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b76c31fb-14ea-4b49-8a41-0b2731967b86-proxy-tls\") pod \"machine-config-daemon-f42fk\" (UID: \"b76c31fb-14ea-4b49-8a41-0b2731967b86\") " pod="openshift-machine-config-operator/machine-config-daemon-f42fk" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.454849 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3fd68329-6540-4965-a036-ddd1045f1190-ovn-node-metrics-cert\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.464336 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.464371 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.464381 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 
00:07:16.464396 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.464406 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:16Z","lastTransitionTime":"2026-01-26T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.466519 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.469547 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wskkd\" (UniqueName: \"kubernetes.io/projected/45c4e01d-bac9-49dd-9be1-bd759f38f2a8-kube-api-access-wskkd\") pod \"multus-additional-cni-plugins-jpmlj\" (UID: \"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\") " 
pod="openshift-multus/multus-additional-cni-plugins-jpmlj" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.476571 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmrkv\" (UniqueName: \"kubernetes.io/projected/3fd68329-6540-4965-a036-ddd1045f1190-kube-api-access-rmrkv\") pod \"ovnkube-node-2vrv2\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.479111 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qhgz\" (UniqueName: \"kubernetes.io/projected/b76c31fb-14ea-4b49-8a41-0b2731967b86-kube-api-access-7qhgz\") pod \"machine-config-daemon-f42fk\" (UID: \"b76c31fb-14ea-4b49-8a41-0b2731967b86\") " pod="openshift-machine-config-operator/machine-config-daemon-f42fk" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.480054 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9dbd\" (UniqueName: \"kubernetes.io/projected/7d3cba21-428c-4151-bb16-f3478d54c90e-kube-api-access-m9dbd\") pod \"multus-bcsb4\" (UID: \"7d3cba21-428c-4151-bb16-f3478d54c90e\") " pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.484267 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/45c4e01d-bac9-49dd-9be1-bd759f38f2a8-tuning-conf-dir\") pod \"multus-additional-cni-plugins-jpmlj\" (UID: \"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\") " pod="openshift-multus/multus-additional-cni-plugins-jpmlj" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.487636 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.504608 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.518837 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.536814 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.548087 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.555508 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.558938 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.567174 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.567210 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.567224 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.567240 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.567250 4975 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:16Z","lastTransitionTime":"2026-01-26T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.588902 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.629140 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.645285 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.647359 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.655819 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.668515 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-bcsb4" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.669908 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.669936 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.669944 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.669958 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.669968 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:16Z","lastTransitionTime":"2026-01-26T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.671618 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.674839 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" Jan 26 00:07:16 crc kubenswrapper[4975]: W0126 00:07:16.703896 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3fd68329_6540_4965_a036_ddd1045f1190.slice/crio-6b518071003e1a2cc17fda3b3c14c83f7f20b84c9da657d4f026e09f7ac2e4f1 WatchSource:0}: Error finding container 6b518071003e1a2cc17fda3b3c14c83f7f20b84c9da657d4f026e09f7ac2e4f1: Status 404 returned error can't find the container with id 6b518071003e1a2cc17fda3b3c14c83f7f20b84c9da657d4f026e09f7ac2e4f1 Jan 26 00:07:16 crc kubenswrapper[4975]: W0126 00:07:16.715178 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb76c31fb_14ea_4b49_8a41_0b2731967b86.slice/crio-a703a85ef252cc6d4f47cd171fa7899e91215a17118f97186210f2f33049b9c9 WatchSource:0}: Error finding container a703a85ef252cc6d4f47cd171fa7899e91215a17118f97186210f2f33049b9c9: Status 404 returned error can't find the container with id a703a85ef252cc6d4f47cd171fa7899e91215a17118f97186210f2f33049b9c9 Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.717501 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Jan 26 00:07:16 crc kubenswrapper[4975]: W0126 00:07:16.727019 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod45c4e01d_bac9_49dd_9be1_bd759f38f2a8.slice/crio-cf1598ae1b884c1a3c1d6d4fd5667f49e00d254826bf94c5a314dd7c26987a5d WatchSource:0}: Error finding container cf1598ae1b884c1a3c1d6d4fd5667f49e00d254826bf94c5a314dd7c26987a5d: Status 404 returned error can't find the container with id cf1598ae1b884c1a3c1d6d4fd5667f49e00d254826bf94c5a314dd7c26987a5d Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.733349 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.739425 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.740809 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.760495 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.772698 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.772752 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.772766 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.772783 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.772794 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:16Z","lastTransitionTime":"2026-01-26T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.773458 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.791850 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.806073 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.829637 4975 
status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":
0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrk
v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-d
ev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.842934 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.860780 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.875272 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.875299 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.875309 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.875322 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.875331 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:16Z","lastTransitionTime":"2026-01-26T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.886025 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.886125 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.908500 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.931347 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.935877 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.945527 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.952120 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.958295 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.975089 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.978631 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.978659 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.978669 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.978683 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.978691 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:16Z","lastTransitionTime":"2026-01-26T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:16 crc kubenswrapper[4975]: I0126 00:07:16.993852 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.003994 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.017712 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.036287 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.046280 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.070887 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.080709 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.080756 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.080765 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.080781 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.080790 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:17Z","lastTransitionTime":"2026-01-26T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.082413 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"
mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.12
6.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.095848 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.110401 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.113440 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 22:24:05.589837602 +0000 UTC Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.135913 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.148798 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.163823 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.164056 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.188110 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.188139 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.188151 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.188165 4975 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeNotReady" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.188175 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:17Z","lastTransitionTime":"2026-01-26T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.191189 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\
":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.231756 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.270121 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bcsb4" event={"ID":"7d3cba21-428c-4151-bb16-f3478d54c90e","Type":"ContainerStarted","Data":"e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450"} Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.270162 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bcsb4" event={"ID":"7d3cba21-428c-4151-bb16-f3478d54c90e","Type":"ContainerStarted","Data":"a6a3f0dbb06782bfe776b9bec4aac60f04b0396f53b5912039110a9090575870"} Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.271316 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-vcvtm" event={"ID":"1e64c045-dfa3-4706-8600-03600ca4980c","Type":"ContainerStarted","Data":"20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88"} Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.272260 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" event={"ID":"45c4e01d-bac9-49dd-9be1-bd759f38f2a8","Type":"ContainerStarted","Data":"2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06"} Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.272281 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" 
event={"ID":"45c4e01d-bac9-49dd-9be1-bd759f38f2a8","Type":"ContainerStarted","Data":"cf1598ae1b884c1a3c1d6d4fd5667f49e00d254826bf94c5a314dd7c26987a5d"} Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.273493 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" event={"ID":"b76c31fb-14ea-4b49-8a41-0b2731967b86","Type":"ContainerStarted","Data":"6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983"} Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.273513 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" event={"ID":"b76c31fb-14ea-4b49-8a41-0b2731967b86","Type":"ContainerStarted","Data":"33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283"} Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.273522 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" event={"ID":"b76c31fb-14ea-4b49-8a41-0b2731967b86","Type":"ContainerStarted","Data":"a703a85ef252cc6d4f47cd171fa7899e91215a17118f97186210f2f33049b9c9"} Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.274626 4975 generic.go:334] "Generic (PLEG): container finished" podID="3fd68329-6540-4965-a036-ddd1045f1190" containerID="4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3" exitCode=0 Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.275145 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerDied","Data":"4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3"} Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.275163 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerStarted","Data":"6b518071003e1a2cc17fda3b3c14c83f7f20b84c9da657d4f026e09f7ac2e4f1"} Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.294384 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.294423 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.294436 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.294469 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.294481 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:17Z","lastTransitionTime":"2026-01-26T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.298897 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.344590 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.400806 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.401052 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.401061 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.401074 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.401083 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:17Z","lastTransitionTime":"2026-01-26T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.403885 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.420257 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.430999 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.456848 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disable
d\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.466471 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.487152 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.507315 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.509293 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.509321 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.509329 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.509344 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.509352 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:17Z","lastTransitionTime":"2026-01-26T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.534885 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.554547 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.577984 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.578611 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.596855 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.610917 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.611037 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.611130 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.611196 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.611258 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:17Z","lastTransitionTime":"2026-01-26T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.612772 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.632503 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.713758 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.713803 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.713814 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.713833 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.713850 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:17Z","lastTransitionTime":"2026-01-26T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.818178 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.818419 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.818508 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.818673 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.818784 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:17Z","lastTransitionTime":"2026-01-26T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.859060 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.859179 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.859213 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:17 crc kubenswrapper[4975]: E0126 00:07:17.859242 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:07:21.85921369 +0000 UTC m=+25.980419194 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:07:17 crc kubenswrapper[4975]: E0126 00:07:17.859269 4975 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 00:07:17 crc kubenswrapper[4975]: E0126 00:07:17.859320 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:21.859311143 +0000 UTC m=+25.980516637 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 00:07:17 crc kubenswrapper[4975]: E0126 00:07:17.859325 4975 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 00:07:17 crc kubenswrapper[4975]: E0126 00:07:17.859382 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:21.859365365 +0000 UTC m=+25.980570869 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.920795 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.920833 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.920845 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.920862 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.920874 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:17Z","lastTransitionTime":"2026-01-26T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.960388 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:17 crc kubenswrapper[4975]: I0126 00:07:17.960454 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:17 crc kubenswrapper[4975]: E0126 00:07:17.960574 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 00:07:17 crc kubenswrapper[4975]: E0126 00:07:17.960593 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 00:07:17 crc kubenswrapper[4975]: E0126 00:07:17.960605 4975 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:17 crc kubenswrapper[4975]: E0126 00:07:17.960652 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:21.960636476 +0000 UTC m=+26.081841970 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:17 crc kubenswrapper[4975]: E0126 00:07:17.960710 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 00:07:17 crc kubenswrapper[4975]: E0126 00:07:17.960721 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 00:07:17 crc kubenswrapper[4975]: E0126 00:07:17.960746 4975 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:17 crc kubenswrapper[4975]: E0126 00:07:17.960777 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:21.96076736 +0000 UTC m=+26.081972854 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.022819 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.022848 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.022858 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.022873 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.022882 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:18Z","lastTransitionTime":"2026-01-26T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.114056 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 08:18:10.049819546 +0000 UTC Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.125477 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.125506 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.125514 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.125529 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.125540 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:18Z","lastTransitionTime":"2026-01-26T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.146161 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:18 crc kubenswrapper[4975]: E0126 00:07:18.146289 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.146337 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:18 crc kubenswrapper[4975]: E0126 00:07:18.146376 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.146412 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:18 crc kubenswrapper[4975]: E0126 00:07:18.146449 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.229462 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.229787 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.229799 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.229814 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.229825 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:18Z","lastTransitionTime":"2026-01-26T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.280604 4975 generic.go:334] "Generic (PLEG): container finished" podID="45c4e01d-bac9-49dd-9be1-bd759f38f2a8" containerID="2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06" exitCode=0 Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.280685 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" event={"ID":"45c4e01d-bac9-49dd-9be1-bd759f38f2a8","Type":"ContainerDied","Data":"2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06"} Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.282117 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798"} Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.285525 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerStarted","Data":"9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c"} Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.285551 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerStarted","Data":"0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2"} Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.285563 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerStarted","Data":"d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2"} Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.285575 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerStarted","Data":"7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14"} Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.285587 
4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerStarted","Data":"69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364"} Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.285597 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerStarted","Data":"9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635"} Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.313770 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.328606 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.332277 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.332300 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.332308 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.332321 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.332331 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:18Z","lastTransitionTime":"2026-01-26T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.363269 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc 
kubenswrapper[4975]: I0126 00:07:18.378982 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"runnin
g\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.389815 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.405127 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.423559 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.433130 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\
\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.434516 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.434543 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.434553 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.434570 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.434580 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:18Z","lastTransitionTime":"2026-01-26T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.448127 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.463476 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.477784 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.488626 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"h
ostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.505397 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9
0092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.519945 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.532776 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.536310 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.536340 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.536351 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.536370 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.536382 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:18Z","lastTransitionTime":"2026-01-26T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.542109 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.558852 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c85
7df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.574667 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.593085 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.593509 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.597231 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.603266 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.608422 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.632446 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.640438 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.640705 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.640743 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.640761 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.640774 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:18Z","lastTransitionTime":"2026-01-26T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.652778 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.665510 4975 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.682532 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.699522 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.714727 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.742139 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.743361 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.743392 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.743401 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.743412 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.743420 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:18Z","lastTransitionTime":"2026-01-26T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.757714 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.777190 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2ed
ef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.793852 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.808766 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.845655 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.845747 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.845759 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.845777 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.845810 4975 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:18Z","lastTransitionTime":"2026-01-26T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.898366 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.917926 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.930289 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"h
ostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.947843 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.948082 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.948154 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.948223 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.948302 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:18Z","lastTransitionTime":"2026-01-26T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:18 crc kubenswrapper[4975]: I0126 00:07:18.959907 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:18Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.011159 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:19Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.042489 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:19Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.050034 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.050066 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.050076 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.050091 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.050103 4975 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:19Z","lastTransitionTime":"2026-01-26T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.082428 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:19Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.113029 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:19Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.115200 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 21:40:33.783265442 +0000 UTC Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.151711 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.151765 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.151777 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.151794 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.151805 4975 setters.go:603] "Node 
became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:19Z","lastTransitionTime":"2026-01-26T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.157213 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:19Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:19 crc 
kubenswrapper[4975]: I0126 00:07:19.192496 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:19Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.239064 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:19Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.253720 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.253775 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.253787 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.253817 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.253829 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:19Z","lastTransitionTime":"2026-01-26T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.276988 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:19Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.289771 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" event={"ID":"45c4e01d-bac9-49dd-9be1-bd759f38f2a8","Type":"ContainerStarted","Data":"2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18"} Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.315203 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/o
penshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:19Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.356010 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.356049 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.356060 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.356077 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.356090 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:19Z","lastTransitionTime":"2026-01-26T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.360301 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:19Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.394590 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:19Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.438076 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:19Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.458555 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.458590 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.458598 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.458613 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.458623 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:19Z","lastTransitionTime":"2026-01-26T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.486250 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:19Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.516577 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\"
:\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:19Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.554140 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"
192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:19Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.560782 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.560817 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.560827 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.560842 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.560852 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:19Z","lastTransitionTime":"2026-01-26T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.599073 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:19Z 
is after 2025-08-24T17:21:41Z" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.634354 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:19Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.663482 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.663518 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.663529 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.663546 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.663555 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:19Z","lastTransitionTime":"2026-01-26T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.681102 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb
68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:19Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.718911 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:19Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.757960 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:19Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.765606 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.765655 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.765669 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.765689 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.765702 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:19Z","lastTransitionTime":"2026-01-26T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.794771 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:19Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.835202 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:19Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.868163 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.868371 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.868477 4975 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.868564 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.868650 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:19Z","lastTransitionTime":"2026-01-26T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.874537 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"
},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:19Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.970434 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.970675 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.970761 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.970831 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:19 crc kubenswrapper[4975]: I0126 00:07:19.970894 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:19Z","lastTransitionTime":"2026-01-26T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.073137 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.073169 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.073177 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.073192 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.073201 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:20Z","lastTransitionTime":"2026-01-26T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.115324 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 01:58:38.56161144 +0000 UTC Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.146655 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.146673 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.146675 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:20 crc kubenswrapper[4975]: E0126 00:07:20.147265 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:07:20 crc kubenswrapper[4975]: E0126 00:07:20.147337 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:07:20 crc kubenswrapper[4975]: E0126 00:07:20.147393 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.186557 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.186604 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.186617 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.186634 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.186646 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:20Z","lastTransitionTime":"2026-01-26T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.289123 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.289329 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.289439 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.289544 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.289630 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:20Z","lastTransitionTime":"2026-01-26T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.294056 4975 generic.go:334] "Generic (PLEG): container finished" podID="45c4e01d-bac9-49dd-9be1-bd759f38f2a8" containerID="2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18" exitCode=0 Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.294115 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" event={"ID":"45c4e01d-bac9-49dd-9be1-bd759f38f2a8","Type":"ContainerDied","Data":"2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18"} Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.307916 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerStarted","Data":"26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9"} Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.308387 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.324688 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.336877 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.348647 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.359147 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.379050 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":
{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.391560 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-26T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.392148 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.392267 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.392359 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.392435 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.392547 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:20Z","lastTransitionTime":"2026-01-26T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.416247 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:20Z 
is after 2025-08-24T17:21:41Z" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.435486 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.457692 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"st
ate\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\
":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.469985 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.480785 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.492046 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.494592 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.494626 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.494639 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.494657 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.494669 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:20Z","lastTransitionTime":"2026-01-26T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.504050 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.515976 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.597105 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.597155 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.597168 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.597185 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.597198 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:20Z","lastTransitionTime":"2026-01-26T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.699878 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.699917 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.699929 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.699944 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.699955 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:20Z","lastTransitionTime":"2026-01-26T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.802399 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.802430 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.802440 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.802456 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.802468 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:20Z","lastTransitionTime":"2026-01-26T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.904532 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.904564 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.904575 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.904590 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:20 crc kubenswrapper[4975]: I0126 00:07:20.904601 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:20Z","lastTransitionTime":"2026-01-26T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.006333 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.006361 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.006370 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.006382 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.006391 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:21Z","lastTransitionTime":"2026-01-26T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.109514 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.109558 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.109576 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.109597 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.109613 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:21Z","lastTransitionTime":"2026-01-26T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.115541 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 20:04:37.727752063 +0000 UTC Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.212330 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.212363 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.212373 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.212390 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.212402 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:21Z","lastTransitionTime":"2026-01-26T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.312939 4975 generic.go:334] "Generic (PLEG): container finished" podID="45c4e01d-bac9-49dd-9be1-bd759f38f2a8" containerID="c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006" exitCode=0 Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.312986 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" event={"ID":"45c4e01d-bac9-49dd-9be1-bd759f38f2a8","Type":"ContainerDied","Data":"c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006"} Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.315258 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.315278 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.315290 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.315306 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.315318 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:21Z","lastTransitionTime":"2026-01-26T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.334014 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.363925 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.386881 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.419278 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.419316 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.419327 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.419349 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.419360 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:21Z","lastTransitionTime":"2026-01-26T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.429211 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\
\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.451538 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.474117 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.490821 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\
\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.513425 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b900922
72e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.522844 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.522884 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.522894 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.522915 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.522925 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:21Z","lastTransitionTime":"2026-01-26T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.527564 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.541539 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.557518 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.574521 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.586924 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"h
ostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.606198 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"las
tState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.620151 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:21Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.626250 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.626305 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.626321 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.626345 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.626360 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:21Z","lastTransitionTime":"2026-01-26T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.728148 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.728196 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.728210 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.728227 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.728240 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:21Z","lastTransitionTime":"2026-01-26T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.831394 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.831480 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.831511 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.831540 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.831561 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:21Z","lastTransitionTime":"2026-01-26T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.904055 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.904148 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.904174 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:21 crc kubenswrapper[4975]: E0126 00:07:21.904327 4975 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 00:07:21 crc kubenswrapper[4975]: E0126 00:07:21.904337 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:07:29.904307383 +0000 UTC m=+34.025512907 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:07:21 crc kubenswrapper[4975]: E0126 00:07:21.904370 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:29.904362084 +0000 UTC m=+34.025567568 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 00:07:21 crc kubenswrapper[4975]: E0126 00:07:21.904415 4975 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 00:07:21 crc kubenswrapper[4975]: E0126 00:07:21.904464 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:29.904449627 +0000 UTC m=+34.025655161 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.934080 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.934128 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.934139 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.934158 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:21 crc kubenswrapper[4975]: I0126 00:07:21.934170 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:21Z","lastTransitionTime":"2026-01-26T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.005091 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.005160 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:22 crc kubenswrapper[4975]: E0126 00:07:22.005268 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 00:07:22 crc kubenswrapper[4975]: E0126 00:07:22.005290 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 00:07:22 crc kubenswrapper[4975]: E0126 00:07:22.005302 4975 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:22 crc kubenswrapper[4975]: E0126 00:07:22.005354 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:30.005338687 +0000 UTC m=+34.126544181 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:22 crc kubenswrapper[4975]: E0126 00:07:22.005268 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 00:07:22 crc kubenswrapper[4975]: E0126 00:07:22.005381 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 00:07:22 crc kubenswrapper[4975]: E0126 00:07:22.005398 4975 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:22 crc kubenswrapper[4975]: E0126 00:07:22.005446 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:30.0054284 +0000 UTC m=+34.126633914 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.036907 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.036942 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.036951 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.036966 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.036975 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:22Z","lastTransitionTime":"2026-01-26T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.115661 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 11:19:05.390039236 +0000 UTC Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.139442 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.139477 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.139485 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.139501 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.139512 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:22Z","lastTransitionTime":"2026-01-26T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.146744 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.146807 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.146826 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:22 crc kubenswrapper[4975]: E0126 00:07:22.146946 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:07:22 crc kubenswrapper[4975]: E0126 00:07:22.147013 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:07:22 crc kubenswrapper[4975]: E0126 00:07:22.147130 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.241933 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.241970 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.241978 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.241994 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.242003 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:22Z","lastTransitionTime":"2026-01-26T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.318872 4975 generic.go:334] "Generic (PLEG): container finished" podID="45c4e01d-bac9-49dd-9be1-bd759f38f2a8" containerID="a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9" exitCode=0 Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.318939 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" event={"ID":"45c4e01d-bac9-49dd-9be1-bd759f38f2a8","Type":"ContainerDied","Data":"a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9"} Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.345685 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2ed
ef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.346214 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.346254 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.346266 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.346287 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.346313 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:22Z","lastTransitionTime":"2026-01-26T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.361348 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.383069 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.397507 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.412690 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.430428 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"h
ostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.444565 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"las
tState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.448324 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.448371 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.448445 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.448461 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 
00:07:22.448474 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:22Z","lastTransitionTime":"2026-01-26T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.457741 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.473417 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.486362 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.497315 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.510906 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"w
aiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.521454 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.542195 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.551221 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.551262 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.551273 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.551292 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.551302 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:22Z","lastTransitionTime":"2026-01-26T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.551867 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:22Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.653625 4975 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.653668 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.653680 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.653697 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.653709 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:22Z","lastTransitionTime":"2026-01-26T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.755919 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.755955 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.755969 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.755985 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.755997 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:22Z","lastTransitionTime":"2026-01-26T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.863522 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.863568 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.863585 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.863608 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.863623 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:22Z","lastTransitionTime":"2026-01-26T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.967431 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.967470 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.967484 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.967502 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:22 crc kubenswrapper[4975]: I0126 00:07:22.967514 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:22Z","lastTransitionTime":"2026-01-26T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.069362 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.069701 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.069711 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.069726 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.069761 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:23Z","lastTransitionTime":"2026-01-26T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.116252 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 16:50:24.918874296 +0000 UTC Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.172019 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.172063 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.172074 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.172090 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.172110 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:23Z","lastTransitionTime":"2026-01-26T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.274241 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.274290 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.274308 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.274329 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.274344 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:23Z","lastTransitionTime":"2026-01-26T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.325373 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerStarted","Data":"b830686705348bd80bc4e939e017b2deaad113c7dc10394c81c81908345d9871"} Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.325586 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.327991 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" event={"ID":"45c4e01d-bac9-49dd-9be1-bd759f38f2a8","Type":"ContainerStarted","Data":"b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9"} Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.345115 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-c
erts\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.353100 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.364596 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.374896 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.376472 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.376530 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.376550 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.376575 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.376593 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:23Z","lastTransitionTime":"2026-01-26T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.390461 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerI
D\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.414964 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.432369 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.446769 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.477452 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\
",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log
-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b830686705348bd80bc4e939e017b2deaad113c7dc10394c81c81908345d9871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\
\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.479030 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.479049 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.479057 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.479071 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.479080 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:23Z","lastTransitionTime":"2026-01-26T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.497439 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.510776 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.524525 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.546811 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.559297 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.577050 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.580815 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.580863 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.580875 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.580892 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.580905 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:23Z","lastTransitionTime":"2026-01-26T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.588995 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.600271 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.609855 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.617650 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.631534 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9
8100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.639847 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.657815 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\
"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-sock
et\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b830686705348bd80bc4e939e017b2deaad113c7dc10394c81c81908345d9871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mou
ntPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.672077 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.692474 4975 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.692506 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.692516 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.692530 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.692541 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:23Z","lastTransitionTime":"2026-01-26T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.697002 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\
"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.710544 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.726334 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.737571 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.749203 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.763049 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"h
ostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.773436 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"las
tState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.785300 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with 
unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.794773 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.794803 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.794811 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.794827 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.794838 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:23Z","lastTransitionTime":"2026-01-26T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.897465 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.897511 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.897524 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.897541 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:23 crc kubenswrapper[4975]: I0126 00:07:23.897554 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:23Z","lastTransitionTime":"2026-01-26T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.000383 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.000419 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.000428 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.000444 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.000453 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:24Z","lastTransitionTime":"2026-01-26T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.103369 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.103447 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.103470 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.103495 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.103513 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:24Z","lastTransitionTime":"2026-01-26T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.116927 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 04:56:27.07489906 +0000 UTC Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.146935 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.147019 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.146935 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:24 crc kubenswrapper[4975]: E0126 00:07:24.147182 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:07:24 crc kubenswrapper[4975]: E0126 00:07:24.147306 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:07:24 crc kubenswrapper[4975]: E0126 00:07:24.147436 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.206153 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.206204 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.206220 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.206240 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.206257 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:24Z","lastTransitionTime":"2026-01-26T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.309783 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.309853 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.309875 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.309907 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.310018 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:24Z","lastTransitionTime":"2026-01-26T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.331414 4975 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.332118 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.379017 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.395201 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.412199 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.412229 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.412240 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.412256 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.412270 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:24Z","lastTransitionTime":"2026-01-26T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.420420 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b830686705348bd80bc4e939e017b2deaad113c7dc10394c81c81908345d9871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/r
un/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.434963 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.458838 4975 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.472593 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.486695 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.500339 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.512541 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.514116 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.514178 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.514187 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.514201 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.514210 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:24Z","lastTransitionTime":"2026-01-26T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.525308 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.538052 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-oper
ator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.552564 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.565849 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.576586 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.586083 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.599486 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9
8100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.616501 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.616539 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.616549 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.616565 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.616575 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:24Z","lastTransitionTime":"2026-01-26T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.719033 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.719098 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.719115 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.719140 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.719160 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:24Z","lastTransitionTime":"2026-01-26T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.784896 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.784940 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.784953 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.784971 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.784982 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:24Z","lastTransitionTime":"2026-01-26T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:24 crc kubenswrapper[4975]: E0126 00:07:24.800843 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.806627 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.806669 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.806683 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.806708 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.806722 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:24Z","lastTransitionTime":"2026-01-26T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:24 crc kubenswrapper[4975]: E0126 00:07:24.836670 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.842522 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.842575 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.842584 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.842597 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.842605 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:24Z","lastTransitionTime":"2026-01-26T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:24 crc kubenswrapper[4975]: E0126 00:07:24.855776 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.859586 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.859854 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.860037 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.860170 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.860314 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:24Z","lastTransitionTime":"2026-01-26T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:24 crc kubenswrapper[4975]: E0126 00:07:24.875536 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.879358 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.879387 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.879398 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.879413 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.879423 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:24Z","lastTransitionTime":"2026-01-26T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:24 crc kubenswrapper[4975]: E0126 00:07:24.896697 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:24 crc kubenswrapper[4975]: E0126 00:07:24.897186 4975 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.899084 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.899116 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.899129 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.899145 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:24 crc kubenswrapper[4975]: I0126 00:07:24.899155 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:24Z","lastTransitionTime":"2026-01-26T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.001410 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.001443 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.001456 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.001470 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.001482 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:25Z","lastTransitionTime":"2026-01-26T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.104017 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.104291 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.104394 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.104508 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.104661 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:25Z","lastTransitionTime":"2026-01-26T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.117786 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 03:41:59.77360175 +0000 UTC Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.207220 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.207292 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.207317 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.207349 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.207373 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:25Z","lastTransitionTime":"2026-01-26T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.310878 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.311001 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.311027 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.311058 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.311079 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:25Z","lastTransitionTime":"2026-01-26T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.340362 4975 generic.go:334] "Generic (PLEG): container finished" podID="45c4e01d-bac9-49dd-9be1-bd759f38f2a8" containerID="b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9" exitCode=0 Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.340443 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" event={"ID":"45c4e01d-bac9-49dd-9be1-bd759f38f2a8","Type":"ContainerDied","Data":"b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9"} Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.340607 4975 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.371967 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.396464 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.414476 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.414518 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.414526 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.414544 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.414555 4975 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:25Z","lastTransitionTime":"2026-01-26T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.416354 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.434970 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.461672 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b830686705348bd80bc4e939e017b2deaad113c7dc10394c81c81908345d9871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPat
h\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.478083 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.500551 4975 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.516525 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.516562 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.516575 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.516592 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.516603 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:25Z","lastTransitionTime":"2026-01-26T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not 
ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.518686 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.535257 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.552844 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.585791 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.606271 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.618923 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.618949 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.618958 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.618971 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.618980 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:25Z","lastTransitionTime":"2026-01-26T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.624443 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.636629 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.650578 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:25Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.726275 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.726307 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.726315 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.726329 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.726338 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:25Z","lastTransitionTime":"2026-01-26T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.828473 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.828528 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.828542 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.828560 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.828572 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:25Z","lastTransitionTime":"2026-01-26T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.935205 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.935249 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.935261 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.935279 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:25 crc kubenswrapper[4975]: I0126 00:07:25.935292 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:25Z","lastTransitionTime":"2026-01-26T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.038056 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.038108 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.038120 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.038139 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.038150 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:26Z","lastTransitionTime":"2026-01-26T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.118442 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 18:08:40.320757405 +0000 UTC Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.140394 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.140432 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.140443 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.140459 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.140471 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:26Z","lastTransitionTime":"2026-01-26T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.146858 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:26 crc kubenswrapper[4975]: E0126 00:07:26.146945 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.147042 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:26 crc kubenswrapper[4975]: E0126 00:07:26.147148 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.147210 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:26 crc kubenswrapper[4975]: E0126 00:07:26.147257 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.173364 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\"
:{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.186613 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.196044 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.209637 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.223810 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.241498 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b830686705348bd80bc4e939e017b2deaad113c7dc10394c81c81908345d9871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPat
h\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.242552 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.242589 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.242599 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.242613 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.242625 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:26Z","lastTransitionTime":"2026-01-26T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.254008 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.273089 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.286872 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.303450 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.315062 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.325868 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.337081 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"h
ostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.344079 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.344119 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.344129 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.344145 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.344155 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:26Z","lastTransitionTime":"2026-01-26T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.346535 4975 generic.go:334] "Generic (PLEG): container finished" podID="45c4e01d-bac9-49dd-9be1-bd759f38f2a8" containerID="219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b" exitCode=0 Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.346587 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" event={"ID":"45c4e01d-bac9-49dd-9be1-bd759f38f2a8","Type":"ContainerDied","Data":"219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b"} Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.347351 4975 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.352247 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf6
3151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.365457 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.378936 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.390690 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.401141 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.427609 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09
041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.438018 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.448017 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.448353 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.448364 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.448383 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.448395 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:26Z","lastTransitionTime":"2026-01-26T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.457511 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b830686705348bd80bc4e939e017b2deaad113c7dc10394c81c81908345d9871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/r
un/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.468858 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.482137 4975 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.507991 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49
117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.527528 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.543818 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.557467 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.557513 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.557524 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.557545 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.557556 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:26Z","lastTransitionTime":"2026-01-26T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.567722 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.583100 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.596826 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.608396 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.659818 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.659866 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.659878 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.659894 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.659926 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:26Z","lastTransitionTime":"2026-01-26T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.762149 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.762182 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.762192 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.762206 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.762214 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:26Z","lastTransitionTime":"2026-01-26T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.864298 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.864357 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.864374 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.864401 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.864421 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:26Z","lastTransitionTime":"2026-01-26T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.967067 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.967106 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.967118 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.967135 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:26 crc kubenswrapper[4975]: I0126 00:07:26.967146 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:26Z","lastTransitionTime":"2026-01-26T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.070068 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.070107 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.070117 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.070134 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.070144 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:27Z","lastTransitionTime":"2026-01-26T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.119589 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 23:54:47.118647225 +0000 UTC Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.172942 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.172976 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.172985 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.173000 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.173009 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:27Z","lastTransitionTime":"2026-01-26T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.275846 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.275891 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.275901 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.275924 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.275936 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:27Z","lastTransitionTime":"2026-01-26T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.353561 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2vrv2_3fd68329-6540-4965-a036-ddd1045f1190/ovnkube-controller/0.log" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.357894 4975 generic.go:334] "Generic (PLEG): container finished" podID="3fd68329-6540-4965-a036-ddd1045f1190" containerID="b830686705348bd80bc4e939e017b2deaad113c7dc10394c81c81908345d9871" exitCode=1 Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.357993 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerDied","Data":"b830686705348bd80bc4e939e017b2deaad113c7dc10394c81c81908345d9871"} Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.358936 4975 scope.go:117] "RemoveContainer" containerID="b830686705348bd80bc4e939e017b2deaad113c7dc10394c81c81908345d9871" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.364448 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" event={"ID":"45c4e01d-bac9-49dd-9be1-bd759f38f2a8","Type":"ContainerStarted","Data":"54d1b2537eed578c14de4d951b4f369e8872f50b0b7fec3b76703438105727f7"} Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.379501 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.379562 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.379580 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.379606 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.379623 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:27Z","lastTransitionTime":"2026-01-26T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.397238 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.415445 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.429984 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.443963 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.460272 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.477600 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"h
ostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.481656 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.481805 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.481953 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.482053 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.482146 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:27Z","lastTransitionTime":"2026-01-26T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.491521 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.503966 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.519539 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.531157 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.541823 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.557056 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09
041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.567856 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.584845 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.584871 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.584879 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.584891 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.584899 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:27Z","lastTransitionTime":"2026-01-26T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.586202 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b830686705348bd80bc4e939e017b2deaad113c7dc10394c81c81908345d9871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b830686705348bd80bc4e939e017b2deaad113c7dc10394c81c81908345d9871\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"message\\\":\\\"t-go/informers/factory.go:160\\\\nI0126 00:07:26.657996 6224 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658011 6224 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0126 00:07:26.658171 6224 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658229 6224 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658470 6224 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658565 6224 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0126 00:07:26.658773 6224 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.659285 6224 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0126 00:07:26.659307 6224 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.596683 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\
\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.606923 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.624723 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b830686705348bd80bc4e939e017b2deaad113c7
dc10394c81c81908345d9871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b830686705348bd80bc4e939e017b2deaad113c7dc10394c81c81908345d9871\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"message\\\":\\\"t-go/informers/factory.go:160\\\\nI0126 00:07:26.657996 6224 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658011 6224 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0126 00:07:26.658171 6224 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658229 6224 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658470 6224 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658565 6224 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0126 00:07:26.658773 6224 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.659285 6224 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0126 00:07:26.659307 6224 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.637234 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\
\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.649706 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.662160 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.675108 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.687513 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.687539 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.687547 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.687560 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.687570 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:27Z","lastTransitionTime":"2026-01-26T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.691212 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.705398 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.734716 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.751135 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.769383 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.782109 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.789802 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.789824 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.789833 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.789846 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.789855 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:27Z","lastTransitionTime":"2026-01-26T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.791636 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.804513 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54d1b2537eed578c14de4d951b4f369e8872f50b0b7fec3b76703438105727f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.816470 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.891942 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.891995 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.892005 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.892024 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.892036 4975 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:27Z","lastTransitionTime":"2026-01-26T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.993756 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.993794 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.993805 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.993819 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:27 crc kubenswrapper[4975]: I0126 00:07:27.993830 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:27Z","lastTransitionTime":"2026-01-26T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.096472 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.096504 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.096512 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.096525 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.096534 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:28Z","lastTransitionTime":"2026-01-26T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.119928 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 13:12:04.35727894 +0000 UTC Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.146446 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:28 crc kubenswrapper[4975]: E0126 00:07:28.146570 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.146660 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.146708 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:28 crc kubenswrapper[4975]: E0126 00:07:28.146792 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:07:28 crc kubenswrapper[4975]: E0126 00:07:28.146848 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.199117 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.199244 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.199389 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.199547 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.199697 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:28Z","lastTransitionTime":"2026-01-26T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.301591 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.301635 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.301644 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.301660 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.301670 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:28Z","lastTransitionTime":"2026-01-26T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.372017 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2vrv2_3fd68329-6540-4965-a036-ddd1045f1190/ovnkube-controller/0.log" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.377511 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerStarted","Data":"cca9d405aba783207366b7394bb551bcd2c0cb9087dba413630cf901452cf727"} Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.377900 4975 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.391851 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.404955 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.405006 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.405018 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.405037 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.405049 4975 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:28Z","lastTransitionTime":"2026-01-26T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.405809 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.416688 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.433057 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54d1b2537eed578c14de4d951b4f369e8872f50b0b7fec3b76703438105727f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.444302 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.462198 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca9d405aba783207366b7394bb551bcd2c0cb9087dba413630cf901452cf727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b830686705348bd80bc4e939e017b2deaad113c7dc10394c81c81908345d9871\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"message\\\":\\\"t-go/informers/factory.go:160\\\\nI0126 00:07:26.657996 6224 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658011 6224 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0126 00:07:26.658171 6224 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658229 6224 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658470 6224 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658565 6224 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0126 00:07:26.658773 6224 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.659285 6224 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0126 00:07:26.659307 6224 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.475916 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.490156 4975 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.507528 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.507583 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.507599 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.507622 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.507637 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:28Z","lastTransitionTime":"2026-01-26T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.520457 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\
\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d
7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.534325 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.549873 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.567711 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.579665 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.591620 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":
\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.605859 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:28Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.609512 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.609550 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.609561 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.609578 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.609590 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:28Z","lastTransitionTime":"2026-01-26T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.712115 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.712157 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.712169 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.712186 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.712198 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:28Z","lastTransitionTime":"2026-01-26T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.903461 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.903503 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.903514 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.903530 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:28 crc kubenswrapper[4975]: I0126 00:07:28.903541 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:28Z","lastTransitionTime":"2026-01-26T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.005610 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.005654 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.005665 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.005679 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.005688 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:29Z","lastTransitionTime":"2026-01-26T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.107749 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.107786 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.107794 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.107808 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.107816 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:29Z","lastTransitionTime":"2026-01-26T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.121036 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 17:52:38.354415603 +0000 UTC Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.210467 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.210511 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.210520 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.210548 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.210559 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:29Z","lastTransitionTime":"2026-01-26T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.312359 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.312406 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.312415 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.312430 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.312440 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:29Z","lastTransitionTime":"2026-01-26T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.325385 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p"] Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.325869 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.327747 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.328754 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.340121 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.351539 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.363767 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.373489 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.384172 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.396416 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54d1b2537eed578c14de4d951b4f369e8872f50b0b7fec3b76703438105727f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.404944 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.407558 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhjbn\" (UniqueName: \"kubernetes.io/projected/ae52bf4e-67f1-480b-af6d-2d1d2ce37e01-kube-api-access-zhjbn\") pod \"ovnkube-control-plane-749d76644c-xwb6p\" (UID: \"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.407611 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ae52bf4e-67f1-480b-af6d-2d1d2ce37e01-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-xwb6p\" (UID: \"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.407967 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" 
(UniqueName: \"kubernetes.io/configmap/ae52bf4e-67f1-480b-af6d-2d1d2ce37e01-env-overrides\") pod \"ovnkube-control-plane-749d76644c-xwb6p\" (UID: \"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.408046 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ae52bf4e-67f1-480b-af6d-2d1d2ce37e01-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-xwb6p\" (UID: \"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.414250 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.414301 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.414313 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.414329 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.414699 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:29Z","lastTransitionTime":"2026-01-26T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.424846 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca9d405aba783207366b7394bb551bcd2c0cb9087dba413630cf901452cf727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b830686705348bd80bc4e939e017b2deaad113c7dc10394c81c81908345d9871\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"message\\\":\\\"t-go/informers/factory.go:160\\\\nI0126 00:07:26.657996 6224 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658011 6224 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0126 00:07:26.658171 6224 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658229 6224 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658470 6224 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658565 6224 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0126 00:07:26.658773 6224 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.659285 6224 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0126 00:07:26.659307 6224 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.435265 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.444521 4975 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xwb6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.461290 4975 
status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-
certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:
06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.472879 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.483017 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.492108 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.502149 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.509220 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ae52bf4e-67f1-480b-af6d-2d1d2ce37e01-env-overrides\") pod \"ovnkube-control-plane-749d76644c-xwb6p\" (UID: \"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.509272 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ae52bf4e-67f1-480b-af6d-2d1d2ce37e01-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-xwb6p\" (UID: \"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.509299 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhjbn\" (UniqueName: \"kubernetes.io/projected/ae52bf4e-67f1-480b-af6d-2d1d2ce37e01-kube-api-access-zhjbn\") pod \"ovnkube-control-plane-749d76644c-xwb6p\" (UID: \"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.509321 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ae52bf4e-67f1-480b-af6d-2d1d2ce37e01-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-xwb6p\" (UID: \"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.509693 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ae52bf4e-67f1-480b-af6d-2d1d2ce37e01-env-overrides\") pod \"ovnkube-control-plane-749d76644c-xwb6p\" (UID: \"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.509862 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ae52bf4e-67f1-480b-af6d-2d1d2ce37e01-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-xwb6p\" (UID: \"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.514055 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:29Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.515621 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ae52bf4e-67f1-480b-af6d-2d1d2ce37e01-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-xwb6p\" (UID: \"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.517469 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.517833 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.517918 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.518020 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.518102 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:29Z","lastTransitionTime":"2026-01-26T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.526531 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhjbn\" (UniqueName: \"kubernetes.io/projected/ae52bf4e-67f1-480b-af6d-2d1d2ce37e01-kube-api-access-zhjbn\") pod \"ovnkube-control-plane-749d76644c-xwb6p\" (UID: \"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.620095 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.620130 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.620141 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.620156 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.620167 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:29Z","lastTransitionTime":"2026-01-26T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.642514 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" Jan 26 00:07:29 crc kubenswrapper[4975]: W0126 00:07:29.656923 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podae52bf4e_67f1_480b_af6d_2d1d2ce37e01.slice/crio-0a38bddfcac85baf924b78609ed2702085e8a15af7a489ad6b8ce8bb5049d223 WatchSource:0}: Error finding container 0a38bddfcac85baf924b78609ed2702085e8a15af7a489ad6b8ce8bb5049d223: Status 404 returned error can't find the container with id 0a38bddfcac85baf924b78609ed2702085e8a15af7a489ad6b8ce8bb5049d223 Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.722831 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.722898 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.722913 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.722928 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.722939 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:29Z","lastTransitionTime":"2026-01-26T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.824952 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.824989 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.824999 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.825014 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.825026 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:29Z","lastTransitionTime":"2026-01-26T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.913244 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.913363 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.913392 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:29 crc kubenswrapper[4975]: E0126 00:07:29.913508 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:07:45.913471352 +0000 UTC m=+50.034676886 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:07:29 crc kubenswrapper[4975]: E0126 00:07:29.913518 4975 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 00:07:29 crc kubenswrapper[4975]: E0126 00:07:29.913614 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:45.913600006 +0000 UTC m=+50.034805530 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 00:07:29 crc kubenswrapper[4975]: E0126 00:07:29.913608 4975 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 00:07:29 crc kubenswrapper[4975]: E0126 00:07:29.913815 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:45.913777761 +0000 UTC m=+50.034983295 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.926833 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.926878 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.926893 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.926914 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:29 crc kubenswrapper[4975]: I0126 00:07:29.926936 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:29Z","lastTransitionTime":"2026-01-26T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.014395 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.014460 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:30 crc kubenswrapper[4975]: E0126 00:07:30.014586 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 00:07:30 crc kubenswrapper[4975]: E0126 00:07:30.014594 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 00:07:30 crc kubenswrapper[4975]: E0126 00:07:30.014632 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 00:07:30 crc kubenswrapper[4975]: E0126 00:07:30.014651 4975 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:30 crc kubenswrapper[4975]: E0126 00:07:30.014604 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 00:07:30 crc kubenswrapper[4975]: E0126 00:07:30.014715 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:46.014693482 +0000 UTC m=+50.135898996 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:30 crc kubenswrapper[4975]: E0126 00:07:30.014721 4975 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:30 crc kubenswrapper[4975]: E0126 00:07:30.014771 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:46.014763015 +0000 UTC m=+50.135968509 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.029214 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.029254 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.029265 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.029281 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.029291 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:30Z","lastTransitionTime":"2026-01-26T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.121785 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 04:02:21.750522058 +0000 UTC Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.132057 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.132093 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.132101 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.132114 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.132124 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:30Z","lastTransitionTime":"2026-01-26T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.146251 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.146309 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:30 crc kubenswrapper[4975]: E0126 00:07:30.146392 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.146276 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:30 crc kubenswrapper[4975]: E0126 00:07:30.146505 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:07:30 crc kubenswrapper[4975]: E0126 00:07:30.146559 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.234023 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.234061 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.234071 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.234087 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.234097 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:30Z","lastTransitionTime":"2026-01-26T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.336055 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.336086 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.336095 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.336107 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.336119 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:30Z","lastTransitionTime":"2026-01-26T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.391329 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" event={"ID":"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01","Type":"ContainerStarted","Data":"cf9c453a47bb593f318201453d450cd4ae4daa241881ee03ef2449c78e23f201"} Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.391381 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" event={"ID":"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01","Type":"ContainerStarted","Data":"5b99ea9c48665a20a382ec6898326e7a9632b41cc9069e05adcea6e9af384505"} Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.391392 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" event={"ID":"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01","Type":"ContainerStarted","Data":"0a38bddfcac85baf924b78609ed2702085e8a15af7a489ad6b8ce8bb5049d223"} Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.393817 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2vrv2_3fd68329-6540-4965-a036-ddd1045f1190/ovnkube-controller/1.log" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.394450 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2vrv2_3fd68329-6540-4965-a036-ddd1045f1190/ovnkube-controller/0.log" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.398908 4975 generic.go:334] "Generic (PLEG): container finished" podID="3fd68329-6540-4965-a036-ddd1045f1190" containerID="cca9d405aba783207366b7394bb551bcd2c0cb9087dba413630cf901452cf727" exitCode=1 Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.398945 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerDied","Data":"cca9d405aba783207366b7394bb551bcd2c0cb9087dba413630cf901452cf727"} Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.399006 4975 scope.go:117] "RemoveContainer" containerID="b830686705348bd80bc4e939e017b2deaad113c7dc10394c81c81908345d9871" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.399901 4975 scope.go:117] "RemoveContainer" containerID="cca9d405aba783207366b7394bb551bcd2c0cb9087dba413630cf901452cf727" Jan 26 00:07:30 crc kubenswrapper[4975]: E0126 00:07:30.400171 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2vrv2_openshift-ovn-kubernetes(3fd68329-6540-4965-a036-ddd1045f1190)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" podUID="3fd68329-6540-4965-a036-ddd1045f1190" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.407546 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.416716 4975 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openshift-multus/network-metrics-daemon-s459q"] Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.417150 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:07:30 crc kubenswrapper[4975]: E0126 00:07:30.417218 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.417560 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b99ea9c48665a20a382ec6898326e7a9632b41cc9069e05adcea6e9af384505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf9c453a47bb593f318201453d450cd4ae4daa241881ee03ef2449c78e23f201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"
ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xwb6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.427123 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.438357 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.438391 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.438400 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.438413 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.438423 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:30Z","lastTransitionTime":"2026-01-26T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.445086 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca9d405aba783207366b7394bb551bcd2c0cb90
87dba413630cf901452cf727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b830686705348bd80bc4e939e017b2deaad113c7dc10394c81c81908345d9871\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"message\\\":\\\"t-go/informers/factory.go:160\\\\nI0126 00:07:26.657996 6224 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658011 6224 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0126 00:07:26.658171 6224 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658229 6224 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658470 6224 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658565 6224 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0126 00:07:26.658773 6224 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.659285 6224 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0126 00:07:26.659307 6224 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.458368 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.472532 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.489189 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.503001 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.515124 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.518832 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrfhx\" (UniqueName: \"kubernetes.io/projected/99d35071-9f6d-45df-841f-fd49ea0550c1-kube-api-access-hrfhx\") pod \"network-metrics-daemon-s459q\" (UID: \"99d35071-9f6d-45df-841f-fd49ea0550c1\") " pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.518866 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs\") pod \"network-metrics-daemon-s459q\" (UID: \"99d35071-9f6d-45df-841f-fd49ea0550c1\") " pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.527100 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.539719 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.540886 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.540914 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.540923 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.540937 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.540947 4975 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:30Z","lastTransitionTime":"2026-01-26T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.553300 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.569102 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.580774 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.591205 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.603995 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54d1b2537eed578c14de4d951b4f369e8872f50b0b7fec3b76703438105727f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.619293 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.619507 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs\") pod \"network-metrics-daemon-s459q\" (UID: \"99d35071-9f6d-45df-841f-fd49ea0550c1\") " pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.619581 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrfhx\" (UniqueName: \"kubernetes.io/projected/99d35071-9f6d-45df-841f-fd49ea0550c1-kube-api-access-hrfhx\") pod \"network-metrics-daemon-s459q\" (UID: \"99d35071-9f6d-45df-841f-fd49ea0550c1\") " pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:07:30 crc kubenswrapper[4975]: E0126 00:07:30.619667 4975 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 00:07:30 crc kubenswrapper[4975]: E0126 00:07:30.619726 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs podName:99d35071-9f6d-45df-841f-fd49ea0550c1 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:31.119710374 +0000 UTC m=+35.240915868 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs") pod "network-metrics-daemon-s459q" (UID: "99d35071-9f6d-45df-841f-fd49ea0550c1") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.631387 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.639004 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrfhx\" (UniqueName: \"kubernetes.io/projected/99d35071-9f6d-45df-841f-fd49ea0550c1-kube-api-access-hrfhx\") pod \"network-metrics-daemon-s459q\" (UID: \"99d35071-9f6d-45df-841f-fd49ea0550c1\") " pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.643191 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.643230 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.643242 4975 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.643261 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.643272 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:30Z","lastTransitionTime":"2026-01-26T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.647281 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54d1b2537eed578c14de4d951b4f369e8872f50b0b7fec3b76703438105727f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"202
6-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed8145
1ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabl
ed\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.659169 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s459q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99d35071-9f6d-45df-841f-fd49ea0550c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s459q\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.671482 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageI
D\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.681212 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.690582 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.706438 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca9d405aba783207366b7394bb551bcd2c0cb9087dba413630cf901452cf727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b830686705348bd80bc4e939e017b2deaad113c7dc10394c81c81908345d9871\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"message\\\":\\\"t-go/informers/factory.go:160\\\\nI0126 00:07:26.657996 6224 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658011 6224 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0126 00:07:26.658171 6224 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658229 6224 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658470 6224 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658565 6224 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0126 00:07:26.658773 6224 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.659285 6224 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0126 00:07:26.659307 6224 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cca9d405aba783207366b7394bb551bcd2c0cb9087dba413630cf901452cf727\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"message\\\":\\\"roller: failed to start default network controller: unable to create new egress service controller while creating new default network controller: handler {0x2009c00 0x20098e0 0x2009880} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:28Z is after 2025-08-24T17:21:41Z]\\\\nI0126 00:07:28.166936 6427 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", 
UUID:\\\\\\\"ab0b1d51-5ec6-479b-8881-93dfa8d30337\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-network-console/networking-console-plugin\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPat
h\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.716490 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.726006 4975 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b99ea9c48665a20a382ec6898326e7a9632b41cc9069e05adcea6e9af384505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf9c453a47bb593f318201453d450cd4ae4daa241881ee03ef2449c78e23f201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xwb6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.735960 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.745398 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.745434 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.745443 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.745461 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.745469 4975 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:30Z","lastTransitionTime":"2026-01-26T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.749043 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.760226 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.772198 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"h
ostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.790180 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9
0092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.801664 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.812890 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.847840 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.847884 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.847895 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.847910 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.847920 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:30Z","lastTransitionTime":"2026-01-26T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.951148 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.951210 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.951222 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.951237 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:30 crc kubenswrapper[4975]: I0126 00:07:30.951246 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:30Z","lastTransitionTime":"2026-01-26T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.053981 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.054024 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.054036 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.054055 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.054066 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:31Z","lastTransitionTime":"2026-01-26T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.122697 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 14:45:20.094998835 +0000 UTC Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.126285 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs\") pod \"network-metrics-daemon-s459q\" (UID: \"99d35071-9f6d-45df-841f-fd49ea0550c1\") " pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:07:31 crc kubenswrapper[4975]: E0126 00:07:31.126503 4975 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 00:07:31 crc kubenswrapper[4975]: E0126 00:07:31.126692 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs podName:99d35071-9f6d-45df-841f-fd49ea0550c1 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:32.12667149 +0000 UTC m=+36.247876984 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs") pod "network-metrics-daemon-s459q" (UID: "99d35071-9f6d-45df-841f-fd49ea0550c1") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.156910 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.156944 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.156952 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.156965 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.156973 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:31Z","lastTransitionTime":"2026-01-26T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.258814 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.258859 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.258870 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.258887 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.258898 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:31Z","lastTransitionTime":"2026-01-26T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.360835 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.360873 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.360883 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.360897 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.360907 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:31Z","lastTransitionTime":"2026-01-26T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.403094 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2vrv2_3fd68329-6540-4965-a036-ddd1045f1190/ovnkube-controller/1.log" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.463230 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.463266 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.463277 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.463291 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.463304 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:31Z","lastTransitionTime":"2026-01-26T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.569932 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.569964 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.569972 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.569986 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.569996 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:31Z","lastTransitionTime":"2026-01-26T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.672657 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.672721 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.672770 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.672789 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.672800 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:31Z","lastTransitionTime":"2026-01-26T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.775813 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.775852 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.775861 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.775875 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.775885 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:31Z","lastTransitionTime":"2026-01-26T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.878104 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.878162 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.878173 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.878188 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.878196 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:31Z","lastTransitionTime":"2026-01-26T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.980498 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.980833 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.980934 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.981025 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:31 crc kubenswrapper[4975]: I0126 00:07:31.981172 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:31Z","lastTransitionTime":"2026-01-26T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.017237 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.029389 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt
\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.039797 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.048110 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.060217 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54d1b2537eed578c14de4d951b4f369e8872f50b0b7fec3b76703438105727f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.069945 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s459q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99d35071-9f6d-45df-841f-fd49ea0550c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s459q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.079418 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.082705 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.082744 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.082755 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.082771 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.082780 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:32Z","lastTransitionTime":"2026-01-26T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.098249 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca9d405aba783207366b7394bb551bcd2c0cb9087dba413630cf901452cf727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b830686705348bd80bc4e939e017b2deaad113c7dc10394c81c81908345d9871\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"message\\\":\\\"t-go/informers/factory.go:160\\\\nI0126 00:07:26.657996 6224 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658011 6224 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0126 00:07:26.658171 6224 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658229 6224 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658470 6224 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658565 6224 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0126 00:07:26.658773 6224 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.659285 6224 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0126 00:07:26.659307 6224 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cca9d405aba783207366b7394bb551bcd2c0cb9087dba413630cf901452cf727\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"message\\\":\\\"roller: failed to start default network controller: unable to create new egress service controller while creating new default network controller: handler {0x2009c00 0x20098e0 0x2009880} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:28Z is after 2025-08-24T17:21:41Z]\\\\nI0126 00:07:28.166936 6427 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"ab0b1d51-5ec6-479b-8881-93dfa8d30337\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-network-console/networking-console-plugin\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, 
Groups:[]string{\\\\\\\"clusterLBGr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.109792 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.122628 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b99ea9c48665a20a382ec6898326e7a9632b41cc9069e05adcea6e9af384505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf9c453a47bb593f318201453d450cd4ae4daa241881ee03ef2449c78e23f201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xwb6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 26 
00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.123081 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 05:51:49.560177292 +0000 UTC Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.136959 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs\") pod \"network-metrics-daemon-s459q\" (UID: \"99d35071-9f6d-45df-841f-fd49ea0550c1\") " pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:07:32 crc kubenswrapper[4975]: E0126 00:07:32.137080 4975 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 00:07:32 crc kubenswrapper[4975]: E0126 00:07:32.137133 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs podName:99d35071-9f6d-45df-841f-fd49ea0550c1 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:34.137117688 +0000 UTC m=+38.258323182 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs") pod "network-metrics-daemon-s459q" (UID: "99d35071-9f6d-45df-841f-fd49ea0550c1") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.142391 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cr
i-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524
213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.147165 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.147180 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.147263 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.147408 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:32 crc kubenswrapper[4975]: E0126 00:07:32.147399 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:07:32 crc kubenswrapper[4975]: E0126 00:07:32.147500 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:07:32 crc kubenswrapper[4975]: E0126 00:07:32.147569 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:07:32 crc kubenswrapper[4975]: E0126 00:07:32.147622 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.154228 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.164696 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.175564 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.184931 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.184966 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.184975 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.184989 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.184998 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:32Z","lastTransitionTime":"2026-01-26T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.188021 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.199923 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.211524 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.222932 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:32Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.286744 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.286773 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.286781 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.286793 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.286822 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:32Z","lastTransitionTime":"2026-01-26T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.388651 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.388680 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.388688 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.388700 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.388707 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:32Z","lastTransitionTime":"2026-01-26T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.491135 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.491166 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.491174 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.491186 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.491196 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:32Z","lastTransitionTime":"2026-01-26T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.594019 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.594112 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.594135 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.594777 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.594810 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:32Z","lastTransitionTime":"2026-01-26T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.697088 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.697136 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.697152 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.697179 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.697197 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:32Z","lastTransitionTime":"2026-01-26T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.799771 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.799863 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.799888 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.799919 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.799943 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:32Z","lastTransitionTime":"2026-01-26T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.902703 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.902757 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.902767 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.902780 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:32 crc kubenswrapper[4975]: I0126 00:07:32.902789 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:32Z","lastTransitionTime":"2026-01-26T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.005045 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.005079 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.005095 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.005109 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.005119 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:33Z","lastTransitionTime":"2026-01-26T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.107419 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.107468 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.107480 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.107499 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.107512 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:33Z","lastTransitionTime":"2026-01-26T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.123786 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 13:22:13.532685107 +0000 UTC Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.209592 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.209653 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.209663 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.209675 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.209685 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:33Z","lastTransitionTime":"2026-01-26T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.311279 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.311328 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.311342 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.311361 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.311373 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:33Z","lastTransitionTime":"2026-01-26T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.413371 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.413402 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.413411 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.413424 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.413433 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:33Z","lastTransitionTime":"2026-01-26T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.516664 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.516701 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.516709 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.516725 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.516753 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:33Z","lastTransitionTime":"2026-01-26T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.619364 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.619412 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.619422 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.619439 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.619450 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:33Z","lastTransitionTime":"2026-01-26T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.722083 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.722129 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.722141 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.722157 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.722170 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:33Z","lastTransitionTime":"2026-01-26T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.824511 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.824544 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.824554 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.824566 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.824575 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:33Z","lastTransitionTime":"2026-01-26T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.927003 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.927055 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.927063 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.927077 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:33 crc kubenswrapper[4975]: I0126 00:07:33.927087 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:33Z","lastTransitionTime":"2026-01-26T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.030117 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.030157 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.030166 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.030182 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.030192 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:34Z","lastTransitionTime":"2026-01-26T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.124295 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 14:53:00.852040783 +0000 UTC Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.132320 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.132354 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.132363 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.132378 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.132387 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:34Z","lastTransitionTime":"2026-01-26T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.147200 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.147277 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.147299 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.147214 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:34 crc kubenswrapper[4975]: E0126 00:07:34.147370 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:07:34 crc kubenswrapper[4975]: E0126 00:07:34.147456 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:07:34 crc kubenswrapper[4975]: E0126 00:07:34.147544 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:07:34 crc kubenswrapper[4975]: E0126 00:07:34.147702 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.160076 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs\") pod \"network-metrics-daemon-s459q\" (UID: \"99d35071-9f6d-45df-841f-fd49ea0550c1\") " pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:07:34 crc kubenswrapper[4975]: E0126 00:07:34.160249 4975 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 00:07:34 crc kubenswrapper[4975]: E0126 00:07:34.160328 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs podName:99d35071-9f6d-45df-841f-fd49ea0550c1 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:38.160303554 +0000 UTC m=+42.281509088 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs") pod "network-metrics-daemon-s459q" (UID: "99d35071-9f6d-45df-841f-fd49ea0550c1") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.234911 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.234996 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.235019 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.235045 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.235063 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:34Z","lastTransitionTime":"2026-01-26T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.337566 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.337614 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.337625 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.337643 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.337654 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:34Z","lastTransitionTime":"2026-01-26T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.440318 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.440378 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.440386 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.440401 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.440429 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:34Z","lastTransitionTime":"2026-01-26T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.543681 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.543769 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.543781 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.543799 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.543807 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:34Z","lastTransitionTime":"2026-01-26T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.647523 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.647583 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.647599 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.647626 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.647645 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:34Z","lastTransitionTime":"2026-01-26T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.750901 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.750965 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.750982 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.751005 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.751020 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:34Z","lastTransitionTime":"2026-01-26T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.853713 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.853819 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.853836 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.853861 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.853893 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:34Z","lastTransitionTime":"2026-01-26T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.957163 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.957565 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.957578 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.957599 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:34 crc kubenswrapper[4975]: I0126 00:07:34.957612 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:34Z","lastTransitionTime":"2026-01-26T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.061142 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.061200 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.061217 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.061240 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.061256 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:35Z","lastTransitionTime":"2026-01-26T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.124827 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 20:25:53.627752633 +0000 UTC Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.129933 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.129996 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.130014 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.130042 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.130061 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:35Z","lastTransitionTime":"2026-01-26T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:35 crc kubenswrapper[4975]: E0126 00:07:35.149938 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.155257 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.155296 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.155309 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.155331 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.155346 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:35Z","lastTransitionTime":"2026-01-26T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:35 crc kubenswrapper[4975]: E0126 00:07:35.181121 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.186450 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.186491 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.186501 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.186545 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.186559 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:35Z","lastTransitionTime":"2026-01-26T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:35 crc kubenswrapper[4975]: E0126 00:07:35.208252 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.213614 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.213688 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.213709 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.213759 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.213776 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:35Z","lastTransitionTime":"2026-01-26T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:35 crc kubenswrapper[4975]: E0126 00:07:35.234114 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.239848 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.239920 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.239945 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.239977 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.240000 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:35Z","lastTransitionTime":"2026-01-26T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:35 crc kubenswrapper[4975]: E0126 00:07:35.261305 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:35Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:35 crc kubenswrapper[4975]: E0126 00:07:35.261535 4975 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.264105 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.264249 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.264364 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.264618 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.264772 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:35Z","lastTransitionTime":"2026-01-26T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.367643 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.367685 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.367697 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.367716 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.367728 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:35Z","lastTransitionTime":"2026-01-26T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.470963 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.471406 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.471624 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.471834 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.471988 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:35Z","lastTransitionTime":"2026-01-26T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.575463 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.575528 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.575546 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.575570 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.575587 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:35Z","lastTransitionTime":"2026-01-26T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.679706 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.680158 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.680343 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.680480 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.680622 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:35Z","lastTransitionTime":"2026-01-26T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.783729 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.783822 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.783838 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.783861 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.783880 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:35Z","lastTransitionTime":"2026-01-26T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.887074 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.887130 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.887146 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.887168 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.887185 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:35Z","lastTransitionTime":"2026-01-26T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.990097 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.990131 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.990139 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.990156 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:35 crc kubenswrapper[4975]: I0126 00:07:35.990167 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:35Z","lastTransitionTime":"2026-01-26T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.093288 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.093334 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.093346 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.093366 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.093378 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:36Z","lastTransitionTime":"2026-01-26T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.125207 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 09:48:25.178666409 +0000 UTC Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.146671 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:36 crc kubenswrapper[4975]: E0126 00:07:36.146908 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.146960 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.146919 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:07:36 crc kubenswrapper[4975]: E0126 00:07:36.147094 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.147137 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:36 crc kubenswrapper[4975]: E0126 00:07:36.147295 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:07:36 crc kubenswrapper[4975]: E0126 00:07:36.147429 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.170665 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"
readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca9d405aba783207366b7394bb551bcd2c0cb9087dba413630cf901452cf727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b830686705348bd80bc4e939e017b2deaad113c7dc10394c81c81908345d9871\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"message\\\":\\\"t-go/informers/factory.go:160\\\\nI0126 00:07:26.657996 6224 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658011 6224 reflector.go:311] Stopping reflector *v1.UserDefinedNetwork (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/userdefinednetwork/v1/apis/informers/externalversions/factory.go:140\\\\nI0126 00:07:26.658171 6224 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658229 6224 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658470 6224 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.658565 6224 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0126 00:07:26.658773 6224 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0126 00:07:26.659285 6224 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0126 00:07:26.659307 6224 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cca9d405aba783207366b7394bb551bcd2c0cb9087dba413630cf901452cf727\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"message\\\":\\\"roller: failed to start default network controller: unable to create new egress service controller while creating new default network controller: handler {0x2009c00 0x20098e0 0x2009880} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:28Z is after 2025-08-24T17:21:41Z]\\\\nI0126 00:07:28.166936 6427 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"ab0b1d51-5ec6-479b-8881-93dfa8d30337\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-network-console/networking-console-plugin\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, 
Groups:[]string{\\\\\\\"clusterLBGr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.188654 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.196334 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.196382 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.196396 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.196415 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.196431 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:36Z","lastTransitionTime":"2026-01-26T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.204374 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b99ea9c48665a20a382ec6898326e7a9632b41cc9069e05adcea6e9af384505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf9c453a47bb593f318201453d450cd4ae4daa241881ee03ef2449c78e23f201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xwb6p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.221257 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.241139 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.255531 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.270405 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.293590 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.298216 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.298492 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.298641 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.298844 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.298985 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:36Z","lastTransitionTime":"2026-01-26T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.312018 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.328440 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.344793 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba
8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.359855 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.378188 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54d1b2537eed578c14de4d951b4f369e8872f50b0b7fec3b76703438105727f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.394384 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s459q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99d35071-9f6d-45df-841f-fd49ea0550c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s459q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.401773 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.401830 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.401843 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.401862 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.401873 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:36Z","lastTransitionTime":"2026-01-26T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.408822 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\
"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.424196 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.438224 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.503958 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.503997 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.504007 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.504020 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.504029 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:36Z","lastTransitionTime":"2026-01-26T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.606574 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.606902 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.607168 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.607260 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.607530 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:36Z","lastTransitionTime":"2026-01-26T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.710187 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.710414 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.710496 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.710583 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.710668 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:36Z","lastTransitionTime":"2026-01-26T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.813307 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.813397 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.813423 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.813458 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.813480 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:36Z","lastTransitionTime":"2026-01-26T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.916930 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.917009 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.917028 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.917053 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:36 crc kubenswrapper[4975]: I0126 00:07:36.917070 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:36Z","lastTransitionTime":"2026-01-26T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.020364 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.020426 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.020446 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.020472 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.020490 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:37Z","lastTransitionTime":"2026-01-26T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.123388 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.123456 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.123477 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.123504 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.123523 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:37Z","lastTransitionTime":"2026-01-26T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.126620 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 05:51:06.878919786 +0000 UTC Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.226580 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.226636 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.226656 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.226680 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.226699 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:37Z","lastTransitionTime":"2026-01-26T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.329801 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.329888 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.329913 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.329944 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.329968 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:37Z","lastTransitionTime":"2026-01-26T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.433161 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.433499 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.433637 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.433861 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.434014 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:37Z","lastTransitionTime":"2026-01-26T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.536574 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.536645 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.536699 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.536770 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.536797 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:37Z","lastTransitionTime":"2026-01-26T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.639917 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.639969 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.639985 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.640007 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.640019 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:37Z","lastTransitionTime":"2026-01-26T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.743366 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.743452 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.743488 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.743523 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.743549 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:37Z","lastTransitionTime":"2026-01-26T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.845874 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.845965 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.845989 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.846021 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.846044 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:37Z","lastTransitionTime":"2026-01-26T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.948638 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.948703 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.948721 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.948779 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:37 crc kubenswrapper[4975]: I0126 00:07:37.948821 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:37Z","lastTransitionTime":"2026-01-26T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.051891 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.051956 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.051974 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.052000 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.052016 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:38Z","lastTransitionTime":"2026-01-26T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.127674 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 04:50:38.67693214 +0000 UTC Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.146223 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.146323 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:38 crc kubenswrapper[4975]: E0126 00:07:38.146405 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.146335 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:38 crc kubenswrapper[4975]: E0126 00:07:38.146451 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.146323 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:07:38 crc kubenswrapper[4975]: E0126 00:07:38.146543 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:07:38 crc kubenswrapper[4975]: E0126 00:07:38.146611 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.155167 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.155251 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.155281 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.155299 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.155311 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:38Z","lastTransitionTime":"2026-01-26T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.203457 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs\") pod \"network-metrics-daemon-s459q\" (UID: \"99d35071-9f6d-45df-841f-fd49ea0550c1\") " pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:07:38 crc kubenswrapper[4975]: E0126 00:07:38.203654 4975 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 00:07:38 crc kubenswrapper[4975]: E0126 00:07:38.203732 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs podName:99d35071-9f6d-45df-841f-fd49ea0550c1 nodeName:}" failed. No retries permitted until 2026-01-26 00:07:46.203709576 +0000 UTC m=+50.324915110 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs") pod "network-metrics-daemon-s459q" (UID: "99d35071-9f6d-45df-841f-fd49ea0550c1") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.257631 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.257665 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.257674 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.257689 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.257699 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:38Z","lastTransitionTime":"2026-01-26T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.360999 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.361066 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.361085 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.361108 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.361126 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:38Z","lastTransitionTime":"2026-01-26T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.464944 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.465007 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.465023 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.465046 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.465064 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:38Z","lastTransitionTime":"2026-01-26T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.568016 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.568138 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.568162 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.568194 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.568220 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:38Z","lastTransitionTime":"2026-01-26T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.671480 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.671558 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.671580 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.671605 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.671622 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:38Z","lastTransitionTime":"2026-01-26T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.774349 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.774394 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.774407 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.774425 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.774439 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:38Z","lastTransitionTime":"2026-01-26T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.877190 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.877225 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.877240 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.877259 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.877272 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:38Z","lastTransitionTime":"2026-01-26T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.980304 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.980371 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.980393 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.980424 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:38 crc kubenswrapper[4975]: I0126 00:07:38.980446 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:38Z","lastTransitionTime":"2026-01-26T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.083677 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.083832 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.083910 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.083932 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.083947 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:39Z","lastTransitionTime":"2026-01-26T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.128164 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 23:31:17.394740254 +0000 UTC Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.187134 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.187185 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.187202 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.187222 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.187235 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:39Z","lastTransitionTime":"2026-01-26T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.289560 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.289616 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.289624 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.289637 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.289645 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:39Z","lastTransitionTime":"2026-01-26T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.392485 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.392554 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.392567 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.392584 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.392595 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:39Z","lastTransitionTime":"2026-01-26T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.494655 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.494705 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.494713 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.494742 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.494752 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:39Z","lastTransitionTime":"2026-01-26T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.597022 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.597090 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.597110 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.597138 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.597157 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:39Z","lastTransitionTime":"2026-01-26T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.699852 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.699888 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.699902 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.699919 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.699930 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:39Z","lastTransitionTime":"2026-01-26T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.801811 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.801853 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.801863 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.801880 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.801890 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:39Z","lastTransitionTime":"2026-01-26T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.904262 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.904326 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.904342 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.904365 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:39 crc kubenswrapper[4975]: I0126 00:07:39.904382 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:39Z","lastTransitionTime":"2026-01-26T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.006945 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.007014 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.007036 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.007067 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.007084 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:40Z","lastTransitionTime":"2026-01-26T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.109898 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.110170 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.110256 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.110344 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.110447 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:40Z","lastTransitionTime":"2026-01-26T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.128558 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 13:24:02.923322865 +0000 UTC Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.146936 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.147014 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.147112 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.147147 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:40 crc kubenswrapper[4975]: E0126 00:07:40.147530 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:07:40 crc kubenswrapper[4975]: E0126 00:07:40.147682 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:07:40 crc kubenswrapper[4975]: E0126 00:07:40.147839 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:07:40 crc kubenswrapper[4975]: E0126 00:07:40.148114 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.213218 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.213316 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.213341 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.213369 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.213389 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:40Z","lastTransitionTime":"2026-01-26T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.316449 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.316529 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.316549 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.316571 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.316619 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:40Z","lastTransitionTime":"2026-01-26T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.407508 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.408795 4975 scope.go:117] "RemoveContainer" containerID="cca9d405aba783207366b7394bb551bcd2c0cb9087dba413630cf901452cf727" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.420761 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.420830 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.420849 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.420877 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.420896 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:40Z","lastTransitionTime":"2026-01-26T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.437312 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.454165 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.474378 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 
genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.491600 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.506355 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.522255 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54d1b2537eed578c14de4d951b4f369e8872f50b0b7fec3b76703438105727f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.523164 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.523193 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:40 crc 
kubenswrapper[4975]: I0126 00:07:40.523201 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.523215 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.523225 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:40Z","lastTransitionTime":"2026-01-26T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.534031 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s459q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99d35071-9f6d-45df-841f-fd49ea0550c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s459q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.544970 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b99ea9c48665a20a382ec6898326e7a9632b41cc9069e05adcea6e9af384505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf9c453a47bb593f318201453d450cd4ae4daa241881ee03ef2449c78e23f201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xwb6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 26 
00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.556452 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.574338 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cca9d405aba783207366b7394bb551bcd2c0cb9087dba413630cf901452cf727\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cca9d405aba783207366b7394bb551bcd2c0cb9087dba413630cf901452cf727\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"message\\\":\\\"roller: failed to start default network controller: unable to create new egress service controller while creating new default network controller: handler {0x2009c00 0x20098e0 0x2009880} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:28Z is after 2025-08-24T17:21:41Z]\\\\nI0126 00:07:28.166936 6427 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"ab0b1d51-5ec6-479b-8881-93dfa8d30337\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-network-console/networking-console-plugin\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2vrv2_openshift-ovn-kubernetes(3fd68329-6540-4965-a036-ddd1045f1190)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.586684 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.602377 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.619547 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.625643 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.625666 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.625676 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.625689 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.625697 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:40Z","lastTransitionTime":"2026-01-26T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.634643 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.648415 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.659587 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.671796 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.728607 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.728644 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.728653 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.728666 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.728677 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:40Z","lastTransitionTime":"2026-01-26T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.833184 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.833242 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.833260 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.833281 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.833297 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:40Z","lastTransitionTime":"2026-01-26T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.935294 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.935332 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.935340 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.935352 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:40 crc kubenswrapper[4975]: I0126 00:07:40.935360 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:40Z","lastTransitionTime":"2026-01-26T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.037278 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.037317 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.037330 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.037349 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.037363 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:41Z","lastTransitionTime":"2026-01-26T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.129555 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 05:01:35.944598663 +0000 UTC Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.139415 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.139450 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.139460 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.139475 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.139489 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:41Z","lastTransitionTime":"2026-01-26T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.242288 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.242352 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.242367 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.242391 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.242406 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:41Z","lastTransitionTime":"2026-01-26T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.344715 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.344795 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.344814 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.344837 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.344853 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:41Z","lastTransitionTime":"2026-01-26T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.442583 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2vrv2_3fd68329-6540-4965-a036-ddd1045f1190/ovnkube-controller/1.log" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.445015 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerStarted","Data":"ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946"} Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.445880 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.446871 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.446913 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.446925 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.446951 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.446961 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:41Z","lastTransitionTime":"2026-01-26T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.464470 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.481787 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cca9d405aba783207366b7394bb551bcd2c0cb9087dba413630cf901452cf727\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"message\\\":\\\"roller: failed to start default network controller: unable to create new egress service controller while creating new default network controller: handler {0x2009c00 0x20098e0 0x2009880} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:28Z is after 2025-08-24T17:21:41Z]\\\\nI0126 00:07:28.166936 6427 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"ab0b1d51-5ec6-479b-8881-93dfa8d30337\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-network-console/networking-console-plugin\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, 
Groups:[]string{\\\\\\\"clusterLBGr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"in
itContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.494364 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.507421 4975 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b99ea9c48665a20a382ec6898326e7a9632b41cc9069e05adcea6e9af384505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf9c453a47bb593f318201453d450cd4ae4daa241881ee03ef2449c78e23f201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xwb6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.530925 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"start
edAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b
6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.551011 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.552223 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.552258 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.552267 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.552282 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.552292 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:41Z","lastTransitionTime":"2026-01-26T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.564428 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.575902 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.586598 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.598841 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.611190 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.626641 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.640953 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 
genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.651959 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.654280 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.654323 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.654335 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.654350 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.654360 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:41Z","lastTransitionTime":"2026-01-26T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.664579 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.679516 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54d1b2537eed578c14de4d951b4f369e8872f50b0b7fec3b76703438105727f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.692141 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s459q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99d35071-9f6d-45df-841f-fd49ea0550c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s459q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:41Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.757371 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.757433 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.757446 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.757466 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.757477 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:41Z","lastTransitionTime":"2026-01-26T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.860158 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.860194 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.860204 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.860218 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.860229 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:41Z","lastTransitionTime":"2026-01-26T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.962352 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.962390 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.962400 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.962414 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:41 crc kubenswrapper[4975]: I0126 00:07:41.962422 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:41Z","lastTransitionTime":"2026-01-26T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.064378 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.064414 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.064423 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.064437 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.064447 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:42Z","lastTransitionTime":"2026-01-26T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.130708 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 05:17:37.216977084 +0000 UTC Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.146406 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.146506 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.146436 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.146436 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:42 crc kubenswrapper[4975]: E0126 00:07:42.146599 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:07:42 crc kubenswrapper[4975]: E0126 00:07:42.146918 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:07:42 crc kubenswrapper[4975]: E0126 00:07:42.146994 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:07:42 crc kubenswrapper[4975]: E0126 00:07:42.147144 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.166955 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.167003 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.167015 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.167031 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.167041 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:42Z","lastTransitionTime":"2026-01-26T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.270348 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.270415 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.270435 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.270472 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.270510 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:42Z","lastTransitionTime":"2026-01-26T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.373850 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.373905 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.373920 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.373937 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.373947 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:42Z","lastTransitionTime":"2026-01-26T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.450687 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2vrv2_3fd68329-6540-4965-a036-ddd1045f1190/ovnkube-controller/2.log" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.451786 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2vrv2_3fd68329-6540-4965-a036-ddd1045f1190/ovnkube-controller/1.log" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.456544 4975 generic.go:334] "Generic (PLEG): container finished" podID="3fd68329-6540-4965-a036-ddd1045f1190" containerID="ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946" exitCode=1 Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.456591 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerDied","Data":"ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946"} Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.456663 4975 scope.go:117] "RemoveContainer" containerID="cca9d405aba783207366b7394bb551bcd2c0cb9087dba413630cf901452cf727" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.457923 4975 scope.go:117] "RemoveContainer" containerID="ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946" Jan 26 00:07:42 crc kubenswrapper[4975]: E0126 00:07:42.458658 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2vrv2_openshift-ovn-kubernetes(3fd68329-6540-4965-a036-ddd1045f1190)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" podUID="3fd68329-6540-4965-a036-ddd1045f1190" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.476657 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.477067 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.477085 4975 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.477112 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.477128 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:42Z","lastTransitionTime":"2026-01-26T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.480555 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.500659 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.522055 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"h
ostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.554361 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9
0092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.576467 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.579364 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.579400 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.579412 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.579428 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.579438 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:42Z","lastTransitionTime":"2026-01-26T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.597297 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.616345 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.637019 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.658260 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54d1b2537eed578c14de4d951b4f369e8872f50b0b7fec3b76703438105727f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"s
tarted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":
\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.672707 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s459q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99d35071-9f6d-45df-841f-fd49ea0550c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s459q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.682020 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.682044 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.682052 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.682064 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.682072 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:42Z","lastTransitionTime":"2026-01-26T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.693929 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.712439 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.729110 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.761333 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cca9d405aba783207366b7394bb551bcd2c0cb9087dba413630cf901452cf727\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"message\\\":\\\"roller: failed to start default network controller: unable to create new egress service controller while creating new default network controller: handler {0x2009c00 0x20098e0 0x2009880} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:28Z is after 2025-08-24T17:21:41Z]\\\\nI0126 00:07:28.166936 6427 services_controller.go:473] Services do not match for network=default, existing lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-network-console/networking-console-plugin_TCP_cluster\\\\\\\", UUID:\\\\\\\"ab0b1d51-5ec6-479b-8881-93dfa8d30337\\\\\\\", Protocol:\\\\\\\"tcp\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-network-console/networking-console-plugin\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGr\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:27Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:41Z\\\",\\\"message\\\":\\\"]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:2379, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:9979, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0126 00:07:41.768441 6618 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0126 00:07:41.768446 6618 services_controller.go:452] Built service openshift-etcd/etcd per-node LB for network=default: []services.LB{}\\\\nI0126 00:07:41.768462 6618 metrics.go:553] Stopping 
metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0126 00:07:41.768463 6618 services_controller.go:453] Built service openshift-etcd/etcd template LB for network=default: []services.LB{}\\\\nI0126 00:07:41.768479 6618 services_controller.go:454] Service openshift-etcd/etcd for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0126 00:07:41.768527 6618 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"
name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.780261 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.784555 4975 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.784608 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.784634 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.784664 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.784688 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:42Z","lastTransitionTime":"2026-01-26T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.800675 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b99ea9c48665a20a382ec6898326e7a9632b41cc9069e05adcea6e9af384505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf9c453a47bb593f318201453d450cd4ae4daa241881ee03ef2449c78e23f201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xwb6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.817323 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\
\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:42Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.887178 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.887233 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.887250 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.887274 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.887291 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:42Z","lastTransitionTime":"2026-01-26T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.990496 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.990568 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.990588 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.990615 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:42 crc kubenswrapper[4975]: I0126 00:07:42.990636 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:42Z","lastTransitionTime":"2026-01-26T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.093577 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.093636 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.093653 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.093676 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.093694 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:43Z","lastTransitionTime":"2026-01-26T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.130956 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 16:33:06.654864202 +0000 UTC Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.195958 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.196029 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.196048 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.196077 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.196098 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:43Z","lastTransitionTime":"2026-01-26T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.298520 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.298567 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.298580 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.298599 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.298611 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:43Z","lastTransitionTime":"2026-01-26T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.401408 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.401469 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.401483 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.401504 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.401516 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:43Z","lastTransitionTime":"2026-01-26T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.462405 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2vrv2_3fd68329-6540-4965-a036-ddd1045f1190/ovnkube-controller/2.log" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.466294 4975 scope.go:117] "RemoveContainer" containerID="ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946" Jan 26 00:07:43 crc kubenswrapper[4975]: E0126 00:07:43.466516 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2vrv2_openshift-ovn-kubernetes(3fd68329-6540-4965-a036-ddd1045f1190)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" podUID="3fd68329-6540-4965-a036-ddd1045f1190" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.492287 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebf6832f57f2542a44eb72a0e448e8613417224c
62d186f7bcc6c42e6f7b8946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:41Z\\\",\\\"message\\\":\\\"]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:2379, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:9979, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0126 00:07:41.768441 6618 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0126 00:07:41.768446 6618 services_controller.go:452] Built service openshift-etcd/etcd per-node LB for network=default: []services.LB{}\\\\nI0126 00:07:41.768462 6618 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0126 00:07:41.768463 6618 services_controller.go:453] Built service openshift-etcd/etcd template LB for network=default: []services.LB{}\\\\nI0126 00:07:41.768479 6618 services_controller.go:454] Service openshift-etcd/etcd for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0126 00:07:41.768527 6618 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2vrv2_openshift-ovn-kubernetes(3fd68329-6540-4965-a036-ddd1045f1190)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.503953 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.503990 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.503998 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.504014 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.504025 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:43Z","lastTransitionTime":"2026-01-26T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.506505 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.518197 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b99ea9c48665a20a382ec6898326e7a9632b41cc9069e05adcea6e9af384505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf9c453a47bb593f318201453d450cd4ae4daa241881ee03ef2449c78e23f201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:
29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xwb6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.529082 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.548426 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.564760 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.580278 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.603363 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.605808 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.605832 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.605855 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.605868 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.605877 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:43Z","lastTransitionTime":"2026-01-26T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.618939 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.631366 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.644460 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba
8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.655899 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.673618 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54d1b2537eed578c14de4d951b4f369e8872f50b0b7fec3b76703438105727f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.684151 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s459q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99d35071-9f6d-45df-841f-fd49ea0550c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s459q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.695764 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.706284 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.707568 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.707602 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.707610 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.707644 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.707655 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:43Z","lastTransitionTime":"2026-01-26T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.714948 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:43Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.810150 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.810190 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.810201 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.810217 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.810228 4975 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:43Z","lastTransitionTime":"2026-01-26T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.912293 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.912331 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.912343 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.912358 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:43 crc kubenswrapper[4975]: I0126 00:07:43.912366 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:43Z","lastTransitionTime":"2026-01-26T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.014993 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.015024 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.015040 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.015062 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.015074 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:44Z","lastTransitionTime":"2026-01-26T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.118498 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.118575 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.118602 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.118636 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.118661 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:44Z","lastTransitionTime":"2026-01-26T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.132028 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 01:21:05.381211283 +0000 UTC Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.146530 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.146590 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.146643 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:44 crc kubenswrapper[4975]: E0126 00:07:44.146894 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.146954 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:44 crc kubenswrapper[4975]: E0126 00:07:44.147063 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:07:44 crc kubenswrapper[4975]: E0126 00:07:44.147180 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:07:44 crc kubenswrapper[4975]: E0126 00:07:44.147322 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.221778 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.221840 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.221854 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.221872 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.221883 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:44Z","lastTransitionTime":"2026-01-26T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.324178 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.324221 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.324231 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.324249 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.324260 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:44Z","lastTransitionTime":"2026-01-26T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.426566 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.426619 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.426640 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.426663 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.426680 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:44Z","lastTransitionTime":"2026-01-26T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.529237 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.529290 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.529304 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.529322 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.529333 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:44Z","lastTransitionTime":"2026-01-26T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.632526 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.632632 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.632652 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.632679 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.632700 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:44Z","lastTransitionTime":"2026-01-26T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.735552 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.735599 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.735615 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.735659 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.735676 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:44Z","lastTransitionTime":"2026-01-26T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.838434 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.838509 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.838528 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.838554 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.838570 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:44Z","lastTransitionTime":"2026-01-26T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.941801 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.942295 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.942474 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.942630 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:44 crc kubenswrapper[4975]: I0126 00:07:44.942822 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:44Z","lastTransitionTime":"2026-01-26T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.045191 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.045228 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.045239 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.045255 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.045265 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:45Z","lastTransitionTime":"2026-01-26T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.133010 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 08:57:50.037897269 +0000 UTC Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.147982 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.148020 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.148033 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.148047 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.148058 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:45Z","lastTransitionTime":"2026-01-26T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.250992 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.251027 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.251037 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.251053 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.251063 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:45Z","lastTransitionTime":"2026-01-26T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.353547 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.353584 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.353593 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.353607 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.353616 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:45Z","lastTransitionTime":"2026-01-26T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.456136 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.456208 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.456230 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.456261 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.456286 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:45Z","lastTransitionTime":"2026-01-26T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.526949 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.526992 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.527000 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.527016 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.527025 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:45Z","lastTransitionTime":"2026-01-26T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:45 crc kubenswrapper[4975]: E0126 00:07:45.546591 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:45Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.551267 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.551348 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.551371 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.551395 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.551412 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:45Z","lastTransitionTime":"2026-01-26T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:45 crc kubenswrapper[4975]: E0126 00:07:45.569260 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:45Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.574182 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.574234 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.574250 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.574276 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.574294 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:45Z","lastTransitionTime":"2026-01-26T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:45 crc kubenswrapper[4975]: E0126 00:07:45.591913 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:45Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.597199 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.597245 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.597262 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.597325 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.597344 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:45Z","lastTransitionTime":"2026-01-26T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:45 crc kubenswrapper[4975]: E0126 00:07:45.615194 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:45Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.620787 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.620821 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.620829 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.620843 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.620854 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:45Z","lastTransitionTime":"2026-01-26T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:45 crc kubenswrapper[4975]: E0126 00:07:45.640050 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:45Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:45 crc kubenswrapper[4975]: E0126 00:07:45.640429 4975 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.642258 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.642304 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.642313 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.642327 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.642337 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:45Z","lastTransitionTime":"2026-01-26T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.745258 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.745305 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.745314 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.745328 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.745337 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:45Z","lastTransitionTime":"2026-01-26T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.847793 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.847828 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.847837 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.847849 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.847859 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:45Z","lastTransitionTime":"2026-01-26T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.950881 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.951155 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.951228 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.951305 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.951372 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:45Z","lastTransitionTime":"2026-01-26T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.992349 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.992451 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:45 crc kubenswrapper[4975]: I0126 00:07:45.992474 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:45 crc kubenswrapper[4975]: E0126 00:07:45.992585 4975 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 00:07:45 crc kubenswrapper[4975]: E0126 00:07:45.992628 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 00:08:17.99261586 +0000 UTC m=+82.113821354 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 00:07:45 crc kubenswrapper[4975]: E0126 00:07:45.992795 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:08:17.992786845 +0000 UTC m=+82.113992329 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:07:45 crc kubenswrapper[4975]: E0126 00:07:45.992840 4975 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 00:07:45 crc kubenswrapper[4975]: E0126 00:07:45.992858 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 00:08:17.992853207 +0000 UTC m=+82.114058691 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.053799 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.053869 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.053889 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.053921 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.053943 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:46Z","lastTransitionTime":"2026-01-26T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.093406 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.093464 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:46 crc kubenswrapper[4975]: E0126 00:07:46.093578 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 00:07:46 crc kubenswrapper[4975]: E0126 00:07:46.093604 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 00:07:46 crc kubenswrapper[4975]: E0126 00:07:46.093615 4975 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:46 crc kubenswrapper[4975]: E0126 00:07:46.093658 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 00:07:46 crc kubenswrapper[4975]: E0126 00:07:46.093697 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 00:07:46 crc kubenswrapper[4975]: E0126 00:07:46.093710 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-26 00:08:18.093694615 +0000 UTC m=+82.214900109 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:46 crc kubenswrapper[4975]: E0126 00:07:46.093717 4975 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:46 crc kubenswrapper[4975]: E0126 00:07:46.093807 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-26 00:08:18.093784918 +0000 UTC m=+82.214990442 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.133804 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 04:27:00.744139574 +0000 UTC Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.147096 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.147246 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.147270 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.147314 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:07:46 crc kubenswrapper[4975]: E0126 00:07:46.147537 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:07:46 crc kubenswrapper[4975]: E0126 00:07:46.147760 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:07:46 crc kubenswrapper[4975]: E0126 00:07:46.147963 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:07:46 crc kubenswrapper[4975]: E0126 00:07:46.148080 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.155593 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.155773 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.155882 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.155966 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.156053 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:46Z","lastTransitionTime":"2026-01-26T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.163138 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.176652 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b99ea9c48665a20a382ec6898326e7a9632b41cc9069e05adcea6e9af384505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf9c453a47bb593f318201453d450cd4ae4daa241881ee03ef2449c78e23f201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:
29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xwb6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.189942 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.225966 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:41Z\\\",\\\"message\\\":\\\"]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:2379, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:9979, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0126 00:07:41.768441 6618 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0126 00:07:41.768446 6618 services_controller.go:452] Built service openshift-etcd/etcd per-node LB for network=default: []services.LB{}\\\\nI0126 00:07:41.768462 6618 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0126 00:07:41.768463 6618 services_controller.go:453] Built service openshift-etcd/etcd template LB for network=default: []services.LB{}\\\\nI0126 00:07:41.768479 6618 services_controller.go:454] Service openshift-etcd/etcd for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0126 00:07:41.768527 6618 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2vrv2_openshift-ovn-kubernetes(3fd68329-6540-4965-a036-ddd1045f1190)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.247488 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.259363 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.259400 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.259410 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.259427 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.259439 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:46Z","lastTransitionTime":"2026-01-26T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.262237 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.293508 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269
019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"
,\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.294981 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs\") pod \"network-metrics-daemon-s459q\" (UID: \"99d35071-9f6d-45df-841f-fd49ea0550c1\") " pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:07:46 crc kubenswrapper[4975]: E0126 00:07:46.295220 4975 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 00:07:46 crc kubenswrapper[4975]: E0126 00:07:46.295302 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs podName:99d35071-9f6d-45df-841f-fd49ea0550c1 nodeName:}" failed. No retries permitted until 2026-01-26 00:08:02.29528002 +0000 UTC m=+66.416485554 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs") pod "network-metrics-daemon-s459q" (UID: "99d35071-9f6d-45df-841f-fd49ea0550c1") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.311928 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.333659 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.352585 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.362803 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.362845 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.362853 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.362870 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.362883 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:46Z","lastTransitionTime":"2026-01-26T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.374266 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.396682 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.413727 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s459q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99d35071-9f6d-45df-841f-fd49ea0550c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s459q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.435060 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.453209 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.466375 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.466444 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.466457 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.466473 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.466485 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:46Z","lastTransitionTime":"2026-01-26T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.469186 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.494097 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54d1b2537eed578c14de4d951b4f369e8872f50b0b7fec3b76703438105727f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.570004 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.570063 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:46 crc 
kubenswrapper[4975]: I0126 00:07:46.570084 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.570108 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.570127 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:46Z","lastTransitionTime":"2026-01-26T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.673792 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.673857 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.673904 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.673930 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.673949 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:46Z","lastTransitionTime":"2026-01-26T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.777786 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.777847 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.777870 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.777900 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.777923 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:46Z","lastTransitionTime":"2026-01-26T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.881123 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.881167 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.881180 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.881196 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.881208 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:46Z","lastTransitionTime":"2026-01-26T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.983251 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.983290 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.983298 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.983312 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:46 crc kubenswrapper[4975]: I0126 00:07:46.983321 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:46Z","lastTransitionTime":"2026-01-26T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.085188 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.085244 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.085263 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.085289 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.085307 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:47Z","lastTransitionTime":"2026-01-26T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.134849 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 23:03:57.809797772 +0000 UTC Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.187707 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.187761 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.187773 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.187791 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.187802 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:47Z","lastTransitionTime":"2026-01-26T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.290920 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.290982 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.291005 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.291035 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.291054 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:47Z","lastTransitionTime":"2026-01-26T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.393553 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.393596 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.393608 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.393654 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.393667 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:47Z","lastTransitionTime":"2026-01-26T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.497319 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.497367 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.497383 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.497401 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.497415 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:47Z","lastTransitionTime":"2026-01-26T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.599712 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.599826 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.599846 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.599870 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.599893 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:47Z","lastTransitionTime":"2026-01-26T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.705935 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.706017 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.706042 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.706077 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.706111 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:47Z","lastTransitionTime":"2026-01-26T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.810328 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.810380 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.810395 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.810415 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.810429 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:47Z","lastTransitionTime":"2026-01-26T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.913076 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.913123 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.913133 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.913148 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:47 crc kubenswrapper[4975]: I0126 00:07:47.913159 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:47Z","lastTransitionTime":"2026-01-26T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.016081 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.016148 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.016158 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.016173 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.016183 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:48Z","lastTransitionTime":"2026-01-26T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.118724 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.118821 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.118843 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.118871 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.118893 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:48Z","lastTransitionTime":"2026-01-26T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.135179 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 11:02:11.408993785 +0000 UTC Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.146543 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.146584 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.146627 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.146543 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:48 crc kubenswrapper[4975]: E0126 00:07:48.146674 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:07:48 crc kubenswrapper[4975]: E0126 00:07:48.146818 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:07:48 crc kubenswrapper[4975]: E0126 00:07:48.146928 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:07:48 crc kubenswrapper[4975]: E0126 00:07:48.146989 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.220296 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.220323 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.220332 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.220343 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.220353 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:48Z","lastTransitionTime":"2026-01-26T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.322756 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.322803 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.322813 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.322828 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.322855 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:48Z","lastTransitionTime":"2026-01-26T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.425382 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.425454 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.425475 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.425502 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.425521 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:48Z","lastTransitionTime":"2026-01-26T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.527236 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.527297 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.527314 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.527338 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.527353 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:48Z","lastTransitionTime":"2026-01-26T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.630384 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.630416 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.630424 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.630436 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.630445 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:48Z","lastTransitionTime":"2026-01-26T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.734248 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.734394 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.734413 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.734437 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.734454 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:48Z","lastTransitionTime":"2026-01-26T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.837688 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.837836 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.837858 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.837920 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.837938 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:48Z","lastTransitionTime":"2026-01-26T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.941009 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.941069 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.941086 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.941112 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:48 crc kubenswrapper[4975]: I0126 00:07:48.941130 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:48Z","lastTransitionTime":"2026-01-26T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.043605 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.043666 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.043684 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.043707 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.043724 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:49Z","lastTransitionTime":"2026-01-26T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.136411 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 01:13:00.652088255 +0000 UTC Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.147060 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.147134 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.147147 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.147167 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.147180 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:49Z","lastTransitionTime":"2026-01-26T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.249692 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.249755 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.249767 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.249796 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.249810 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:49Z","lastTransitionTime":"2026-01-26T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.352257 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.352330 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.352349 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.352372 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.352389 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:49Z","lastTransitionTime":"2026-01-26T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.455389 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.455453 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.455470 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.455493 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.455510 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:49Z","lastTransitionTime":"2026-01-26T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.558894 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.558963 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.558979 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.559008 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.559026 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:49Z","lastTransitionTime":"2026-01-26T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.661463 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.661529 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.661554 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.661584 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.661607 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:49Z","lastTransitionTime":"2026-01-26T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.764125 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.764224 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.764249 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.764285 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.764308 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:49Z","lastTransitionTime":"2026-01-26T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.867391 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.867450 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.867463 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.867482 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.867494 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:49Z","lastTransitionTime":"2026-01-26T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.970256 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.970393 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.970408 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.970432 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:49 crc kubenswrapper[4975]: I0126 00:07:49.970443 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:49Z","lastTransitionTime":"2026-01-26T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.073168 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.073216 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.073229 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.073246 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.073257 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:50Z","lastTransitionTime":"2026-01-26T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.137334 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 05:50:19.8314642 +0000 UTC Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.146634 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.146674 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:50 crc kubenswrapper[4975]: E0126 00:07:50.146773 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.146801 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.146870 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:07:50 crc kubenswrapper[4975]: E0126 00:07:50.147005 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:07:50 crc kubenswrapper[4975]: E0126 00:07:50.147166 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:07:50 crc kubenswrapper[4975]: E0126 00:07:50.147208 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.175412 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.175467 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.175479 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.175496 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.175508 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:50Z","lastTransitionTime":"2026-01-26T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.225427 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.240400 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.245795 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92e
daf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.260372 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.276058 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 
genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.278118 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.278184 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.278212 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.278243 4975 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.278264 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:50Z","lastTransitionTime":"2026-01-26T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.292196 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.303138 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.321521 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54d1b2537eed578c14de4d951b4f369e8872f50b0b7fec3b76703438105727f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.333788 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s459q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99d35071-9f6d-45df-841f-fd49ea0550c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s459q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.349603 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b99ea9c48665a20a382ec6898326e7a9632b41cc9069e05adcea6e9af384505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf9c453a47bb593f318201453d450cd4ae4daa241881ee03ef2449c78e23f201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xwb6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 26 
00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.360214 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.381180 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.381222 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.381234 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.381250 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.381260 4975 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:50Z","lastTransitionTime":"2026-01-26T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.382358 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\
":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:41Z\\\",\\\"message\\\":\\\"]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:2379, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:9979, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0126 00:07:41.768441 6618 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0126 00:07:41.768446 6618 services_controller.go:452] Built service openshift-etcd/etcd per-node LB for network=default: []services.LB{}\\\\nI0126 00:07:41.768462 6618 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0126 00:07:41.768463 6618 services_controller.go:453] Built service openshift-etcd/etcd template LB for network=default: []services.LB{}\\\\nI0126 00:07:41.768479 6618 services_controller.go:454] Service openshift-etcd/etcd for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0126 00:07:41.768527 6618 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start 
default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2vrv2_openshift-ovn-kubernetes(3fd68329-6540-4965-a036-ddd1045f1190)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrk
v\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.395405 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.409248 4975 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.431792 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49
117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.445490 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.464817 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.484500 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.484573 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.484598 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.484633 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.484656 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:50Z","lastTransitionTime":"2026-01-26T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.485640 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.506189 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.588564 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.588668 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.588682 4975 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.588702 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.588714 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:50Z","lastTransitionTime":"2026-01-26T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.691301 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.691359 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.691376 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.691400 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.691416 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:50Z","lastTransitionTime":"2026-01-26T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.794165 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.794435 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.794522 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.794614 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.794726 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:50Z","lastTransitionTime":"2026-01-26T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.897038 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.897101 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.897123 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.897152 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.897173 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:50Z","lastTransitionTime":"2026-01-26T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.999777 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.999854 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:50 crc kubenswrapper[4975]: I0126 00:07:50.999872 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:50.999894 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:50.999912 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:50Z","lastTransitionTime":"2026-01-26T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.102769 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.102830 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.102842 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.102862 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.102873 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:51Z","lastTransitionTime":"2026-01-26T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.138200 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 07:35:35.062790907 +0000 UTC Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.206419 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.206492 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.206507 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.206526 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.206539 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:51Z","lastTransitionTime":"2026-01-26T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.309940 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.310065 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.310085 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.310111 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.310127 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:51Z","lastTransitionTime":"2026-01-26T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.412320 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.412361 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.412372 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.412388 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.412400 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:51Z","lastTransitionTime":"2026-01-26T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.515272 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.515312 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.515324 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.515340 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.515352 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:51Z","lastTransitionTime":"2026-01-26T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.618256 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.618494 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.618567 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.618636 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.618696 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:51Z","lastTransitionTime":"2026-01-26T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.721489 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.721524 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.721534 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.721548 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.721557 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:51Z","lastTransitionTime":"2026-01-26T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.824059 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.824096 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.824106 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.824123 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.824136 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:51Z","lastTransitionTime":"2026-01-26T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.926221 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.926267 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.926283 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.926304 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:51 crc kubenswrapper[4975]: I0126 00:07:51.926316 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:51Z","lastTransitionTime":"2026-01-26T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.028334 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.028367 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.028376 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.028389 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.028397 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:52Z","lastTransitionTime":"2026-01-26T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.130594 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.130646 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.130668 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.130696 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.130718 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:52Z","lastTransitionTime":"2026-01-26T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.138795 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 04:53:23.254719335 +0000 UTC Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.146280 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.146323 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.146289 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:52 crc kubenswrapper[4975]: E0126 00:07:52.146385 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.146284 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:07:52 crc kubenswrapper[4975]: E0126 00:07:52.146485 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:07:52 crc kubenswrapper[4975]: E0126 00:07:52.146560 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:07:52 crc kubenswrapper[4975]: E0126 00:07:52.146705 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.233241 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.233271 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.233282 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.233296 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.233306 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:52Z","lastTransitionTime":"2026-01-26T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.335719 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.335772 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.335784 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.335798 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.335808 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:52Z","lastTransitionTime":"2026-01-26T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.438641 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.438727 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.438787 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.438814 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.438831 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:52Z","lastTransitionTime":"2026-01-26T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.541679 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.541779 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.541799 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.541822 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.541840 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:52Z","lastTransitionTime":"2026-01-26T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.644721 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.644763 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.644774 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.644788 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.644797 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:52Z","lastTransitionTime":"2026-01-26T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.747840 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.747866 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.747874 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.747886 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.747895 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:52Z","lastTransitionTime":"2026-01-26T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.850137 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.850184 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.850202 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.850266 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.850292 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:52Z","lastTransitionTime":"2026-01-26T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.953849 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.953969 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.953988 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.954010 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:52 crc kubenswrapper[4975]: I0126 00:07:52.954026 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:52Z","lastTransitionTime":"2026-01-26T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.056176 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.056229 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.056255 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.056281 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.056296 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:53Z","lastTransitionTime":"2026-01-26T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.138945 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 18:56:46.203635072 +0000 UTC Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.159085 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.159135 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.159156 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.159184 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.159203 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:53Z","lastTransitionTime":"2026-01-26T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.261504 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.261549 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.261564 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.261584 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.261599 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:53Z","lastTransitionTime":"2026-01-26T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.365412 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.365470 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.365488 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.365511 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.365525 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:53Z","lastTransitionTime":"2026-01-26T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.468874 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.468947 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.468968 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.468986 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.468997 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:53Z","lastTransitionTime":"2026-01-26T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.571609 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.571672 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.571694 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.571719 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.571772 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:53Z","lastTransitionTime":"2026-01-26T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.674386 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.674724 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.674927 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.675085 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.675228 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:53Z","lastTransitionTime":"2026-01-26T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.778821 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.778880 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.778899 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.778925 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.778944 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:53Z","lastTransitionTime":"2026-01-26T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.881263 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.881330 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.881348 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.881382 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.881405 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:53Z","lastTransitionTime":"2026-01-26T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.984695 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.985033 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.985246 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.985437 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:53 crc kubenswrapper[4975]: I0126 00:07:53.985603 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:53Z","lastTransitionTime":"2026-01-26T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.088362 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.088710 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.088990 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.089252 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.089500 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:54Z","lastTransitionTime":"2026-01-26T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.139449 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 17:21:28.101534788 +0000 UTC Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.146967 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:54 crc kubenswrapper[4975]: E0126 00:07:54.147108 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.147317 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:54 crc kubenswrapper[4975]: E0126 00:07:54.147624 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.147400 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:54 crc kubenswrapper[4975]: E0126 00:07:54.148198 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.147354 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:07:54 crc kubenswrapper[4975]: E0126 00:07:54.148905 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.148704 4975 scope.go:117] "RemoveContainer" containerID="ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946" Jan 26 00:07:54 crc kubenswrapper[4975]: E0126 00:07:54.149636 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2vrv2_openshift-ovn-kubernetes(3fd68329-6540-4965-a036-ddd1045f1190)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" podUID="3fd68329-6540-4965-a036-ddd1045f1190" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.192787 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.192850 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.192875 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.192905 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.192926 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:54Z","lastTransitionTime":"2026-01-26T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.295695 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.295751 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.295763 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.295779 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.295792 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:54Z","lastTransitionTime":"2026-01-26T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.399016 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.399131 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.399149 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.399188 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.399216 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:54Z","lastTransitionTime":"2026-01-26T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.502364 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.502435 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.502474 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.502506 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.502527 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:54Z","lastTransitionTime":"2026-01-26T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.606286 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.606358 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.606375 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.606403 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.606422 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:54Z","lastTransitionTime":"2026-01-26T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.709669 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.709724 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.709761 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.709786 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.709807 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:54Z","lastTransitionTime":"2026-01-26T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.817512 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.818504 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.818682 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.818889 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.819042 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:54Z","lastTransitionTime":"2026-01-26T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.922655 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.922709 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.922721 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.922758 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:54 crc kubenswrapper[4975]: I0126 00:07:54.922773 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:54Z","lastTransitionTime":"2026-01-26T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.025397 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.025458 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.025479 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.025501 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.025519 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:55Z","lastTransitionTime":"2026-01-26T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.129013 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.129080 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.129102 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.129130 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.129150 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:55Z","lastTransitionTime":"2026-01-26T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.140266 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 05:18:56.612359192 +0000 UTC Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.231775 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.231826 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.231846 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.231874 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.231894 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:55Z","lastTransitionTime":"2026-01-26T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.335065 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.335201 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.335222 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.335250 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.335271 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:55Z","lastTransitionTime":"2026-01-26T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.439352 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.439434 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.439467 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.439506 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.439535 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:55Z","lastTransitionTime":"2026-01-26T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.543132 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.543201 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.543221 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.543246 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.543266 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:55Z","lastTransitionTime":"2026-01-26T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.647692 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.647806 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.647827 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.647856 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.647875 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:55Z","lastTransitionTime":"2026-01-26T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.752011 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.752090 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.752117 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.752178 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.752202 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:55Z","lastTransitionTime":"2026-01-26T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.855456 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.855530 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.855550 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.855579 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.855607 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:55Z","lastTransitionTime":"2026-01-26T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.959309 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.959379 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.959398 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.959426 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:55 crc kubenswrapper[4975]: I0126 00:07:55.959445 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:55Z","lastTransitionTime":"2026-01-26T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.006644 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.006694 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.006713 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.006765 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.006782 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:56Z","lastTransitionTime":"2026-01-26T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:56 crc kubenswrapper[4975]: E0126 00:07:56.059893 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.065811 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.065878 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.065895 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.065921 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.065945 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:56Z","lastTransitionTime":"2026-01-26T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:56 crc kubenswrapper[4975]: E0126 00:07:56.086326 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.090813 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.090882 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.090901 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.090930 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.090951 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:56Z","lastTransitionTime":"2026-01-26T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:56 crc kubenswrapper[4975]: E0126 00:07:56.112579 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.117355 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.117416 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.117437 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.117465 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.117486 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:56Z","lastTransitionTime":"2026-01-26T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.140672 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 13:06:00.856802727 +0000 UTC Jan 26 00:07:56 crc kubenswrapper[4975]: E0126 00:07:56.145454 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.146211 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.146272 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:56 crc kubenswrapper[4975]: E0126 00:07:56.146357 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.146452 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:56 crc kubenswrapper[4975]: E0126 00:07:56.146516 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:07:56 crc kubenswrapper[4975]: E0126 00:07:56.146588 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.146674 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:07:56 crc kubenswrapper[4975]: E0126 00:07:56.146877 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.152682 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.152716 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.152745 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.152768 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.152783 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:56Z","lastTransitionTime":"2026-01-26T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.166271 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db50376-23cf-4cea-b849-fe725551394d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://343687e4b9df509c30607aa4077e5e9a25d8f285e0c6223fc63fa53bfa8a194b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e3b636746caa52fd748950983c7200d4c866f61d7d1f4bccce6bd0bc78f379e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54540a6b1af04f2f0bd1edade567e1983994da0fee4bb985455dc1c63be1f377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ec892414d32222aaf5d9ce150144c5ab03b998e767f864644c5307df68d4362\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ec892414d32222aaf5d9ce150144c5ab03b998e767f864644c5307df68d4362\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:56 crc kubenswrapper[4975]: E0126 00:07:56.175942 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:56 crc kubenswrapper[4975]: E0126 00:07:56.176235 4975 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.179987 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.180192 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.180420 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.180652 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.180842 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:56Z","lastTransitionTime":"2026-01-26T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.187213 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.225888 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube
-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:41Z\\\",\\\"message\\\":\\\"]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:2379, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:9979, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0126 00:07:41.768441 6618 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0126 00:07:41.768446 6618 services_controller.go:452] Built service openshift-etcd/etcd per-node LB for network=default: []services.LB{}\\\\nI0126 00:07:41.768462 6618 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0126 00:07:41.768463 6618 services_controller.go:453] Built service openshift-etcd/etcd template LB for network=default: []services.LB{}\\\\nI0126 00:07:41.768479 6618 services_controller.go:454] Service openshift-etcd/etcd for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0126 00:07:41.768527 6618 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2vrv2_openshift-ovn-kubernetes(3fd68329-6540-4965-a036-ddd1045f1190)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.249416 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.271349 4975 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b99ea9c48665a20a382ec6898326e7a9632b41cc9069e05adcea6e9af384505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf9c453a47bb593f318201453d450cd4ae4daa241881ee03ef2449c78e23f201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xwb6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.285957 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.286031 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.286056 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.286094 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.286125 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:56Z","lastTransitionTime":"2026-01-26T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.290957 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2026-01-26T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.313217 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.334899 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.356667 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.379705 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.389172 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.389308 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.389329 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.389356 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.389374 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:56Z","lastTransitionTime":"2026-01-26T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.418409 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c
8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.440034 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.463652 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.487925 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-26T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.492583 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.492640 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.492659 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.492687 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.492706 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:56Z","lastTransitionTime":"2026-01-26T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.505970 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[
{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.532523 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54d1b2537eed578c14de4d951b4f369e8872f50b0b7fec3b76703438105727f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"
mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restart
Count\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.545705 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s459q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99d35071-9f6d-45df-841f-fd49ea0550c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s459q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:56Z is after 2025-08-24T17:21:41Z" 
Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.561810 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"nam
e\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:07:56Z is after 2025-08-24T17:21:41Z" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.597535 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.597923 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.597933 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.597950 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.597961 4975 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:56Z","lastTransitionTime":"2026-01-26T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.701869 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.701955 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.701981 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.702017 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.702042 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:56Z","lastTransitionTime":"2026-01-26T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.805436 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.805503 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.805524 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.805553 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.805604 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:56Z","lastTransitionTime":"2026-01-26T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.908583 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.908668 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.908688 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.908720 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:56 crc kubenswrapper[4975]: I0126 00:07:56.908801 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:56Z","lastTransitionTime":"2026-01-26T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.012265 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.012335 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.012354 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.012381 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.012401 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:57Z","lastTransitionTime":"2026-01-26T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.115869 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.115915 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.115927 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.115954 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.115965 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:57Z","lastTransitionTime":"2026-01-26T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.141781 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 03:51:12.997074778 +0000 UTC Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.218671 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.218708 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.218719 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.218757 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.218772 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:57Z","lastTransitionTime":"2026-01-26T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.321219 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.321255 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.321266 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.321282 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.321293 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:57Z","lastTransitionTime":"2026-01-26T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.424525 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.424591 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.424614 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.424648 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.424670 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:57Z","lastTransitionTime":"2026-01-26T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.526952 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.526992 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.527001 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.527016 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.527025 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:57Z","lastTransitionTime":"2026-01-26T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.628724 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.628763 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.628771 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.628785 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.628794 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:57Z","lastTransitionTime":"2026-01-26T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.731408 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.731451 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.731463 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.731480 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.731490 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:57Z","lastTransitionTime":"2026-01-26T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.834024 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.834089 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.834107 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.834146 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.834164 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:57Z","lastTransitionTime":"2026-01-26T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.936931 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.936991 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.937008 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.937033 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:57 crc kubenswrapper[4975]: I0126 00:07:57.937052 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:57Z","lastTransitionTime":"2026-01-26T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.040189 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.040252 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.040268 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.040292 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.040308 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:58Z","lastTransitionTime":"2026-01-26T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.141988 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 13:33:58.979738948 +0000 UTC Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.143584 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.143642 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.143661 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.143687 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.143706 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:58Z","lastTransitionTime":"2026-01-26T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.147000 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.147049 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.147076 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:07:58 crc kubenswrapper[4975]: E0126 00:07:58.147169 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.147202 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:07:58 crc kubenswrapper[4975]: E0126 00:07:58.147548 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:07:58 crc kubenswrapper[4975]: E0126 00:07:58.147652 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:07:58 crc kubenswrapper[4975]: E0126 00:07:58.147798 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.246837 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.246906 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.246926 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.246964 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.246988 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:58Z","lastTransitionTime":"2026-01-26T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.350359 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.350418 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.350434 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.350456 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.350472 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:58Z","lastTransitionTime":"2026-01-26T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.453657 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.453718 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.453780 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.453812 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.453836 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:58Z","lastTransitionTime":"2026-01-26T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.558018 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.558094 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.558112 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.558137 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.558155 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:58Z","lastTransitionTime":"2026-01-26T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.660472 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.660995 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.661183 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.661373 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.661565 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:58Z","lastTransitionTime":"2026-01-26T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.765398 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.765476 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.765496 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.765522 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.765539 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:58Z","lastTransitionTime":"2026-01-26T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.868213 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.868246 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.868255 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.868269 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.868310 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:58Z","lastTransitionTime":"2026-01-26T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.970474 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.970543 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.970560 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.970582 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:58 crc kubenswrapper[4975]: I0126 00:07:58.970601 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:58Z","lastTransitionTime":"2026-01-26T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.073634 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.073703 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.073721 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.073884 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.073910 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:59Z","lastTransitionTime":"2026-01-26T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.142821 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 14:13:49.597404685 +0000 UTC Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.176564 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.176617 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.176638 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.176659 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.176676 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:59Z","lastTransitionTime":"2026-01-26T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.279865 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.279938 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.279956 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.279981 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.279998 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:59Z","lastTransitionTime":"2026-01-26T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.382475 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.382518 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.382526 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.382552 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.382564 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:59Z","lastTransitionTime":"2026-01-26T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.485104 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.485177 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.485196 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.485219 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.485238 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:59Z","lastTransitionTime":"2026-01-26T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.588093 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.588164 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.588181 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.588206 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.588226 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:59Z","lastTransitionTime":"2026-01-26T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.690397 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.690448 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.690459 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.690477 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.690493 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:59Z","lastTransitionTime":"2026-01-26T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.793034 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.793081 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.793095 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.793113 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.793126 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:59Z","lastTransitionTime":"2026-01-26T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.895579 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.895614 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.895622 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.895638 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.895647 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:59Z","lastTransitionTime":"2026-01-26T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.998597 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.998641 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.998652 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.998671 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:07:59 crc kubenswrapper[4975]: I0126 00:07:59.998684 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:07:59Z","lastTransitionTime":"2026-01-26T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.100903 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.100954 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.100970 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.100991 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.101006 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:00Z","lastTransitionTime":"2026-01-26T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.143837 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 09:13:27.69412809 +0000 UTC Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.147076 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.147137 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.147187 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:00 crc kubenswrapper[4975]: E0126 00:08:00.147284 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.147306 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:00 crc kubenswrapper[4975]: E0126 00:08:00.147415 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:00 crc kubenswrapper[4975]: E0126 00:08:00.147551 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:00 crc kubenswrapper[4975]: E0126 00:08:00.147921 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.203110 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.203146 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.203154 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.203170 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.203180 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:00Z","lastTransitionTime":"2026-01-26T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.305326 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.305360 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.305370 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.305386 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.305397 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:00Z","lastTransitionTime":"2026-01-26T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.407512 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.407556 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.407568 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.407583 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.407592 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:00Z","lastTransitionTime":"2026-01-26T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.509405 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.509444 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.509453 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.509467 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.509476 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:00Z","lastTransitionTime":"2026-01-26T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.611276 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.611323 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.611334 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.611351 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.611368 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:00Z","lastTransitionTime":"2026-01-26T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.713797 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.713849 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.713865 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.713887 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.713903 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:00Z","lastTransitionTime":"2026-01-26T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.816025 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.816057 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.816065 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.816079 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.816087 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:00Z","lastTransitionTime":"2026-01-26T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.918084 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.918129 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.918145 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.918164 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:00 crc kubenswrapper[4975]: I0126 00:08:00.918176 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:00Z","lastTransitionTime":"2026-01-26T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.020964 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.021005 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.021015 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.021030 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.021040 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:01Z","lastTransitionTime":"2026-01-26T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.127349 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.127380 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.127389 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.127402 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.127411 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:01Z","lastTransitionTime":"2026-01-26T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.144918 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 17:41:28.533745739 +0000 UTC Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.229310 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.229348 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.229384 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.229405 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.229418 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:01Z","lastTransitionTime":"2026-01-26T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.332333 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.332374 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.332387 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.332405 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.332419 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:01Z","lastTransitionTime":"2026-01-26T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.435250 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.435282 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.435296 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.435315 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.435326 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:01Z","lastTransitionTime":"2026-01-26T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.537913 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.537943 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.537951 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.537964 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.537973 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:01Z","lastTransitionTime":"2026-01-26T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.639957 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.640001 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.640013 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.640032 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.640044 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:01Z","lastTransitionTime":"2026-01-26T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.741860 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.741890 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.741900 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.741915 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.741925 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:01Z","lastTransitionTime":"2026-01-26T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.843713 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.843759 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.843769 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.843783 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.843792 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:01Z","lastTransitionTime":"2026-01-26T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.945818 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.945848 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.945859 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.945871 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:01 crc kubenswrapper[4975]: I0126 00:08:01.945879 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:01Z","lastTransitionTime":"2026-01-26T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.048000 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.048039 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.048048 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.048063 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.048072 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:02Z","lastTransitionTime":"2026-01-26T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.145860 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 05:27:34.217178779 +0000 UTC Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.147143 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.147157 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:02 crc kubenswrapper[4975]: E0126 00:08:02.147327 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.147176 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:02 crc kubenswrapper[4975]: E0126 00:08:02.147379 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.147158 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:02 crc kubenswrapper[4975]: E0126 00:08:02.147453 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:02 crc kubenswrapper[4975]: E0126 00:08:02.147606 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.150245 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.150273 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.150282 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.150295 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.150305 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:02Z","lastTransitionTime":"2026-01-26T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.252360 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.252399 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.252410 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.252430 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.252443 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:02Z","lastTransitionTime":"2026-01-26T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.354987 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.355040 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.355055 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.355072 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.355102 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:02Z","lastTransitionTime":"2026-01-26T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.379027 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs\") pod \"network-metrics-daemon-s459q\" (UID: \"99d35071-9f6d-45df-841f-fd49ea0550c1\") " pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:02 crc kubenswrapper[4975]: E0126 00:08:02.379227 4975 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 00:08:02 crc kubenswrapper[4975]: E0126 00:08:02.379318 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs podName:99d35071-9f6d-45df-841f-fd49ea0550c1 nodeName:}" failed. No retries permitted until 2026-01-26 00:08:34.379299877 +0000 UTC m=+98.500505391 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs") pod "network-metrics-daemon-s459q" (UID: "99d35071-9f6d-45df-841f-fd49ea0550c1") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.457364 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.457427 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.457442 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.457461 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.457475 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:02Z","lastTransitionTime":"2026-01-26T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.559728 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.559782 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.559791 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.559805 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.559828 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:02Z","lastTransitionTime":"2026-01-26T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.662292 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.662349 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.662370 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.662394 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.662413 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:02Z","lastTransitionTime":"2026-01-26T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.765485 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.765592 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.765615 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.765651 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.765677 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:02Z","lastTransitionTime":"2026-01-26T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.867980 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.868033 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.868050 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.868074 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.868090 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:02Z","lastTransitionTime":"2026-01-26T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.970399 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.970436 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.970447 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.970464 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:02 crc kubenswrapper[4975]: I0126 00:08:02.970474 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:02Z","lastTransitionTime":"2026-01-26T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.074083 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.074150 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.074180 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.074213 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.074235 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:03Z","lastTransitionTime":"2026-01-26T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.146591 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 20:21:26.796067465 +0000 UTC Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.176981 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.177235 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.177396 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.177613 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.177805 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:03Z","lastTransitionTime":"2026-01-26T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.280328 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.280386 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.280400 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.280438 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.280452 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:03Z","lastTransitionTime":"2026-01-26T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.382138 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.382170 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.382178 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.382190 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.382199 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:03Z","lastTransitionTime":"2026-01-26T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.483948 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.483985 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.483997 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.484012 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.484022 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:03Z","lastTransitionTime":"2026-01-26T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.586276 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.586308 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.586316 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.586329 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.586339 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:03Z","lastTransitionTime":"2026-01-26T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.688516 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.688556 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.688567 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.688578 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.688587 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:03Z","lastTransitionTime":"2026-01-26T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.791178 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.791220 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.791233 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.791251 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.791262 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:03Z","lastTransitionTime":"2026-01-26T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.893825 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.893863 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.893873 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.893889 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.893898 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:03Z","lastTransitionTime":"2026-01-26T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.995754 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.995779 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.995788 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.995801 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:03 crc kubenswrapper[4975]: I0126 00:08:03.995810 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:03Z","lastTransitionTime":"2026-01-26T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.097876 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.097941 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.097963 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.097992 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.098012 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:04Z","lastTransitionTime":"2026-01-26T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.147053 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 12:32:54.609142826 +0000 UTC Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.147275 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.147295 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.147297 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:04 crc kubenswrapper[4975]: E0126 00:08:04.147425 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.147449 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:04 crc kubenswrapper[4975]: E0126 00:08:04.147511 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:04 crc kubenswrapper[4975]: E0126 00:08:04.147578 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:04 crc kubenswrapper[4975]: E0126 00:08:04.147639 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.200378 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.200449 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.200462 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.200478 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.200489 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:04Z","lastTransitionTime":"2026-01-26T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.302965 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.302993 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.303001 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.303014 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.303023 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:04Z","lastTransitionTime":"2026-01-26T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.405150 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.405188 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.405200 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.405216 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.405226 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:04Z","lastTransitionTime":"2026-01-26T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.507872 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.507916 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.507928 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.507945 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.507959 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:04Z","lastTransitionTime":"2026-01-26T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.610192 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.610232 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.610242 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.610257 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.610265 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:04Z","lastTransitionTime":"2026-01-26T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.712825 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.712869 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.712878 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.712893 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.712902 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:04Z","lastTransitionTime":"2026-01-26T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.815207 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.815243 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.815252 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.815266 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.815276 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:04Z","lastTransitionTime":"2026-01-26T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.917657 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.917705 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.917713 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.917725 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:04 crc kubenswrapper[4975]: I0126 00:08:04.917748 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:04Z","lastTransitionTime":"2026-01-26T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.020099 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.020132 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.020141 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.020154 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.020163 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:05Z","lastTransitionTime":"2026-01-26T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.122008 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.122053 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.122066 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.122082 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.122094 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:05Z","lastTransitionTime":"2026-01-26T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.147464 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 21:42:08.383312212 +0000 UTC Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.158796 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.224034 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.224111 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.224126 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.224145 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.224157 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:05Z","lastTransitionTime":"2026-01-26T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.327135 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.327174 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.327183 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.327219 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.327229 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:05Z","lastTransitionTime":"2026-01-26T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.429461 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.429499 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.429509 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.429524 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.429534 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:05Z","lastTransitionTime":"2026-01-26T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.531788 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.531817 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.531825 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.531838 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.531848 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:05Z","lastTransitionTime":"2026-01-26T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.563698 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bcsb4_7d3cba21-428c-4151-bb16-f3478d54c90e/kube-multus/0.log" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.563752 4975 generic.go:334] "Generic (PLEG): container finished" podID="7d3cba21-428c-4151-bb16-f3478d54c90e" containerID="e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450" exitCode=1 Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.564170 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bcsb4" event={"ID":"7d3cba21-428c-4151-bb16-f3478d54c90e","Type":"ContainerDied","Data":"e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450"} Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.564399 4975 scope.go:117] "RemoveContainer" containerID="e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.576460 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b81a2724-c05c-4843-afc5-30e16033f895\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a238b460df2b16ee264cabce67c7af7588ea471a73cae2dceb4ee1705ec9518d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e370bf64b79a5bbd56ecd85c893e974f4e4127131fc4af7817bd3ccc33477fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e
370bf64b79a5bbd56ecd85c893e974f4e4127131fc4af7817bd3ccc33477fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.588865 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db50376-23cf-4cea-b849-fe725551394d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://343687e4b9df509c30607aa4077e5e9a25d8f285e0c6223fc63fa53bfa8a194b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e3b636746caa52fd748950983c7200d4c866f61d7d1f4bccce6bd0bc78f379e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",
\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54540a6b1af04f2f0bd1edade567e1983994da0fee4bb985455dc1c63be1f377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ec892414d32222aaf5d9ce150144c5ab03b998e767f864644c5307df68d4362\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ec892414d32222aaf5d9ce150144c5ab03b998e767f864644c5307df68d4362\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.598591 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.625949 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:41Z\\\",\\\"message\\\":\\\"]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:2379, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:9979, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0126 00:07:41.768441 6618 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0126 00:07:41.768446 6618 services_controller.go:452] Built service openshift-etcd/etcd per-node LB for network=default: []services.LB{}\\\\nI0126 00:07:41.768462 6618 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0126 00:07:41.768463 6618 services_controller.go:453] Built service openshift-etcd/etcd template LB for network=default: []services.LB{}\\\\nI0126 00:07:41.768479 6618 services_controller.go:454] Service openshift-etcd/etcd for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0126 00:07:41.768527 6618 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2vrv2_openshift-ovn-kubernetes(3fd68329-6540-4965-a036-ddd1045f1190)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.636518 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.636854 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.636867 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.636886 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.636898 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:05Z","lastTransitionTime":"2026-01-26T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.639612 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.649395 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b99ea9c48665a20a382ec6898326e7a9632b41cc9069e05adcea6e9af384505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf9c453a47bb593f318201453d450cd4ae4daa241881ee03ef2449c78e23f201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:
29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xwb6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.664959 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-de
v@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.676676 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.690237 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.700528 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.712517 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.726214 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:08:04Z\\\",\\\"message\\\":\\\"2026-01-26T00:07:19+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c7f73d9c-f631-4e28-9643-52bdc7c6a44b\\\\n2026-01-26T00:07:19+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c7f73d9c-f631-4e28-9643-52bdc7c6a44b to /host/opt/cni/bin/\\\\n2026-01-26T00:07:19Z [verbose] multus-daemon started\\\\n2026-01-26T00:07:19Z [verbose] Readiness Indicator file check\\\\n2026-01-26T00:08:04Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.739771 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.739798 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.739806 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.739820 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.739829 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:05Z","lastTransitionTime":"2026-01-26T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.740163 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.752510 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.764480 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 
genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.776047 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.785462 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.798541 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54d1b2537eed578c14de4d951b4f369e8872f50b0b7fec3b76703438105727f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.808103 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s459q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99d35071-9f6d-45df-841f-fd49ea0550c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s459q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.842380 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.842477 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.842500 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.842529 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.842549 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:05Z","lastTransitionTime":"2026-01-26T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.945023 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.945064 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.945080 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.945099 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:05 crc kubenswrapper[4975]: I0126 00:08:05.945111 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:05Z","lastTransitionTime":"2026-01-26T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.047529 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.047574 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.047584 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.047603 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.047615 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:06Z","lastTransitionTime":"2026-01-26T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.146346 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.146404 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.146431 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:06 crc kubenswrapper[4975]: E0126 00:08:06.146472 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:06 crc kubenswrapper[4975]: E0126 00:08:06.146536 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:06 crc kubenswrapper[4975]: E0126 00:08:06.146615 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.146627 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:06 crc kubenswrapper[4975]: E0126 00:08:06.146690 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.147686 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 21:11:17.833068309 +0000 UTC Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.150140 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.150168 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.150176 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.150189 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.150198 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:06Z","lastTransitionTime":"2026-01-26T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.157875 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.168456 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.180689 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.192666 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.204046 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:05Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:08:04Z\\\",\\\"message\\\":\\\"2026-01-26T00:07:19+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c7f73d9c-f631-4e28-9643-52bdc7c6a44b\\\\n2026-01-26T00:07:19+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c7f73d9c-f631-4e28-9643-52bdc7c6a44b to /host/opt/cni/bin/\\\\n2026-01-26T00:07:19Z [verbose] multus-daemon started\\\\n2026-01-26T00:07:19Z [verbose] Readiness Indicator file check\\\\n2026-01-26T00:08:04Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.213146 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.213167 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.213175 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.213188 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.213197 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:06Z","lastTransitionTime":"2026-01-26T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.228070 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: E0126 00:08:06.229154 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.232654 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.232681 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.232689 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.232701 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.232710 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:06Z","lastTransitionTime":"2026-01-26T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.242880 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: E0126 00:08:06.245107 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.248836 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.248867 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.248878 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.248893 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.248905 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:06Z","lastTransitionTime":"2026-01-26T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.257264 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-c
erts\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: E0126 00:08:06.259522 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"5
3ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.262828 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.262865 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.262879 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.262897 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.262910 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:06Z","lastTransitionTime":"2026-01-26T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.268824 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: E0126 00:08:06.274264 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.277257 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.277280 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.277289 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.277304 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.277313 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:06Z","lastTransitionTime":"2026-01-26T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.283103 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.296688 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54d1b2537eed578c14de4d951b4f369e8872f50b0b7fec3b76703438105727f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3
cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-
copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: E0126 00:08:06.296860 4975 kubelet_node_status.go:585] "Error updating node 
status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537f
cfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":50
7459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\
\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: E0126 00:08:06.296971 4975 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.298625 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.298652 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.298661 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.298674 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.298682 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:06Z","lastTransitionTime":"2026-01-26T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.306942 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s459q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99d35071-9f6d-45df-841f-fd49ea0550c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s459q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.321028 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.330565 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db50376-23cf-4cea-b849-fe725551394d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://343687e4b9df509c30607aa4077e5e9a25d8f285e0c6223fc63fa53bfa8a194b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e3b636746caa52fd748950983c7200d4c866f61d7d1f4bccce6bd0bc78f379e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54540a6b1af04f2f0bd1edade567e1983994da0fee4bb985455dc1c63be1f377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ec892414d32222aaf5d9ce150144c5ab03b998e767f864644c5307df68d4362\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ec892414d32222aaf5d9ce150144c5ab03b998e767f864644c5307df68d4362\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.338949 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.357905 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics
-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\
":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:41Z\\\",\\\"message\\\":\\\"]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:2379, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:9979, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0126 00:07:41.768441 6618 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0126 00:07:41.768446 6618 services_controller.go:452] Built service openshift-etcd/etcd per-node LB for network=default: []services.LB{}\\\\nI0126 00:07:41.768462 6618 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0126 00:07:41.768463 6618 services_controller.go:453] Built service openshift-etcd/etcd template LB for network=default: []services.LB{}\\\\nI0126 00:07:41.768479 6618 services_controller.go:454] Service openshift-etcd/etcd for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0126 00:07:41.768527 6618 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: 
unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2vrv2_openshift-ovn-kubernetes(3fd68329-6540-4965-a036-ddd1045f1190)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,
\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.369368 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.380541 4975 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b99ea9c48665a20a382ec6898326e7a9632b41cc9069e05adcea6e9af384505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf9c453a47bb593f318201453d450cd4ae4daa241881ee03ef2449c78e23f201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xwb6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.391159 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b81a2724-c05c-4843-afc5-30e16033f895\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a238b460df2b16ee264cabce67c7af7588ea471a73cae2dceb4ee1705ec9518d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e370bf64b79a5bbd56ecd85c893e974f4e4127131fc4af7817bd3ccc33477fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e370bf64b79a5bbd56ecd85c893e974f4e4127131fc4af7817bd3ccc33477fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 
2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.400696 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.401063 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.401211 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.401366 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.401512 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:06Z","lastTransitionTime":"2026-01-26T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.503769 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.504098 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.504224 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.504372 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.504530 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:06Z","lastTransitionTime":"2026-01-26T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.568318 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bcsb4_7d3cba21-428c-4151-bb16-f3478d54c90e/kube-multus/0.log" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.568555 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bcsb4" event={"ID":"7d3cba21-428c-4151-bb16-f3478d54c90e","Type":"ContainerStarted","Data":"c6cc951f021693f452e15865d8fed4a5318e88a5a4c778f8d9dbf6464fde061b"} Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.582159 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6cc951f021693f452e15865d8fed4a5318e88a5a4c778f8d9dbf6464fde061b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:08:04Z\\\",\\\"message\\\":\\\"2026-01-26T00:07:19+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c7f73d9c-f631-4e28-9643-52bdc7c6a44b\\\\n2026-01-26T00:07:19+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c7f73d9c-f631-4e28-9643-52bdc7c6a44b to /host/opt/cni/bin/\\\\n2026-01-26T00:07:19Z [verbose] multus-daemon started\\\\n2026-01-26T00:07:19Z [verbose] Readiness Indicator file check\\\\n2026-01-26T00:08:04Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.609158 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.609184 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.609192 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.609204 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.609212 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:06Z","lastTransitionTime":"2026-01-26T00:08:06Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.620778 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\"
:true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":fal
se,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.643230 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.657344 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.669571 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.682684 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.695091 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":
\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.706351 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.711421 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.711443 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.711451 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.711467 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.711478 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:06Z","lastTransitionTime":"2026-01-26T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.717673 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.728700 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.737827 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.750869 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54d1b2537eed578c14de4d951b4f369e8872f50b0b7fec3b76703438105727f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.760590 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s459q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99d35071-9f6d-45df-841f-fd49ea0550c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s459q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.769458 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b99ea9c48665a20a382ec6898326e7a9632b41cc9069e05adcea6e9af384505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf9c453a47bb593f318201453d450cd4ae4daa241881ee03ef2449c78e23f201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xwb6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 
00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.778422 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b81a2724-c05c-4843-afc5-30e16033f895\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a238b460df2b16ee264cabce67c7af7588ea471a73cae2dceb4ee1705ec9518d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e370bf64b79a5bbd56ecd85c893e974f4e4127131fc4af7817bd3ccc33477fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e370bf64b79a5bbd56ecd85c893e974f4e4127131fc4af7817bd3ccc33477fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.787591 4975 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db50376-23cf-4cea-b849-fe725551394d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://343687e4b9df509c30607aa4077e5e9a25d8f285e0c6223fc63fa53bfa8a194b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e3b636746caa52fd748950983c7200d4c866f61d7d1f4bccce6bd0bc78f379e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54540a6b1af04f2f0bd1edade567e1983994da0fee4bb985455dc1c63be1f377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\
\\":\\\"cri-o://1ec892414d32222aaf5d9ce150144c5ab03b998e767f864644c5307df68d4362\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ec892414d32222aaf5d9ce150144c5ab03b998e767f864644c5307df68d4362\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.796088 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\
"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.812162 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\
"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes
.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:41Z\\\",\\\"message\\\":\\\"]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:2379, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:9979, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0126 00:07:41.768441 6618 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0126 00:07:41.768446 6618 services_controller.go:452] Built service openshift-etcd/etcd per-node LB for network=default: []services.LB{}\\\\nI0126 00:07:41.768462 6618 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0126 00:07:41.768463 6618 services_controller.go:453] Built service openshift-etcd/etcd template LB for network=default: []services.LB{}\\\\nI0126 00:07:41.768479 6618 services_controller.go:454] Service openshift-etcd/etcd for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0126 00:07:41.768527 6618 ovnkube.go:137] failed to run ovnkube: 
[failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2vrv2_openshift-ovn-kubernetes(3fd68329-6540-4965-a036-ddd1045f1190)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.813175 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.813289 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.813355 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.813421 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.813484 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:06Z","lastTransitionTime":"2026-01-26T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.823762 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:06Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.915865 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.915912 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.915922 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.915942 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:06 crc kubenswrapper[4975]: I0126 00:08:06.915953 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:06Z","lastTransitionTime":"2026-01-26T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.018681 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.018826 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.018853 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.018888 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.018909 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:07Z","lastTransitionTime":"2026-01-26T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.121473 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.121521 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.121533 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.121550 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.121562 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:07Z","lastTransitionTime":"2026-01-26T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.148185 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 08:29:40.159819977 +0000 UTC Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.224042 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.224086 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.224099 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.224116 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.224127 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:07Z","lastTransitionTime":"2026-01-26T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.325894 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.325929 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.325943 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.325959 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.325969 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:07Z","lastTransitionTime":"2026-01-26T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.432242 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.432289 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.432300 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.432316 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.432328 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:07Z","lastTransitionTime":"2026-01-26T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.534506 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.534547 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.534563 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.534581 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.534593 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:07Z","lastTransitionTime":"2026-01-26T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.637628 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.637673 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.637682 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.637699 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.637708 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:07Z","lastTransitionTime":"2026-01-26T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.740196 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.740240 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.740249 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.740264 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.740273 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:07Z","lastTransitionTime":"2026-01-26T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.843026 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.843074 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.843084 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.843099 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.843107 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:07Z","lastTransitionTime":"2026-01-26T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.945662 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.945708 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.945719 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.945757 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:07 crc kubenswrapper[4975]: I0126 00:08:07.945771 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:07Z","lastTransitionTime":"2026-01-26T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.048552 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.048603 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.048618 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.048637 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.048649 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:08Z","lastTransitionTime":"2026-01-26T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.146710 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.146717 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.146875 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.146927 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:08 crc kubenswrapper[4975]: E0126 00:08:08.146960 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:08 crc kubenswrapper[4975]: E0126 00:08:08.147045 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:08 crc kubenswrapper[4975]: E0126 00:08:08.147111 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:08 crc kubenswrapper[4975]: E0126 00:08:08.147243 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.148630 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 06:17:04.149517319 +0000 UTC Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.150522 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.150550 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.150558 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.150573 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.150583 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:08Z","lastTransitionTime":"2026-01-26T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.253291 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.253331 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.253340 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.253356 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.253368 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:08Z","lastTransitionTime":"2026-01-26T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.357032 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.357123 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.357149 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.357180 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.357201 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:08Z","lastTransitionTime":"2026-01-26T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.459418 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.459492 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.459511 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.459536 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.459585 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:08Z","lastTransitionTime":"2026-01-26T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.562076 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.562127 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.562139 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.562157 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.562175 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:08Z","lastTransitionTime":"2026-01-26T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.664396 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.664430 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.664439 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.664453 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.664462 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:08Z","lastTransitionTime":"2026-01-26T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.767039 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.767078 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.767087 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.767102 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.767111 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:08Z","lastTransitionTime":"2026-01-26T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.869273 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.869310 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.869319 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.869336 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.869345 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:08Z","lastTransitionTime":"2026-01-26T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.971016 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.971049 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.971058 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.971071 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:08 crc kubenswrapper[4975]: I0126 00:08:08.971080 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:08Z","lastTransitionTime":"2026-01-26T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.073140 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.073185 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.073203 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.073222 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.073235 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:09Z","lastTransitionTime":"2026-01-26T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.147195 4975 scope.go:117] "RemoveContainer" containerID="ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.149053 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 14:28:41.779669245 +0000 UTC Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.175726 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.175791 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.175800 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.175817 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.175830 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:09Z","lastTransitionTime":"2026-01-26T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.278423 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.278492 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.278509 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.278532 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.278547 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:09Z","lastTransitionTime":"2026-01-26T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.380936 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.380973 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.380991 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.381008 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.381019 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:09Z","lastTransitionTime":"2026-01-26T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.482784 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.482823 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.482835 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.482852 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.482865 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:09Z","lastTransitionTime":"2026-01-26T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.577463 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2vrv2_3fd68329-6540-4965-a036-ddd1045f1190/ovnkube-controller/2.log" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.580983 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerStarted","Data":"61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4"} Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.583146 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.584944 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.584976 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.584986 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.585001 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.585012 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:09Z","lastTransitionTime":"2026-01-26T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.598288 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b81a2724-c05c-4843-afc5-30e16033f895\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a238b460df2b16ee264cabce67c7af7588ea471a73cae2dceb4ee1705ec9518d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e370bf64b79a5bbd56ecd85c893e974f4e4127131fc4af7817bd3ccc33477fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e370bf64b79a5bbd56ecd85c893e974f4e4127131fc4af7817bd3ccc33477fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:09Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.615597 4975 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db50376-23cf-4cea-b849-fe725551394d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://343687e4b9df509c30607aa4077e5e9a25d8f285e0c6223fc63fa53bfa8a194b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e3b636746caa52fd748950983c7200d4c866f61d7d1f4bccce6bd0bc78f379e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54540a6b1af04f2f0bd1edade567e1983994da0fee4bb985455dc1c63be1f377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\
"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ec892414d32222aaf5d9ce150144c5ab03b998e767f864644c5307df68d4362\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ec892414d32222aaf5d9ce150144c5ab03b998e767f864644c5307df68d4362\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:09Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.630290 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\
\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:09Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.659120 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMou
nts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"
},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:41Z\\\",\\\"message\\\":\\\"]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:2379, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:9979, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0126 00:07:41.768441 6618 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0126 00:07:41.768446 6618 services_controller.go:452] Built service openshift-etcd/etcd per-node LB for network=default: []services.LB{}\\\\nI0126 00:07:41.768462 6618 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0126 00:07:41.768463 6618 services_controller.go:453] Built service openshift-etcd/etcd template LB for network=default: []services.LB{}\\\\nI0126 00:07:41.768479 6618 services_controller.go:454] Service openshift-etcd/etcd for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0126 
00:07:41.768527 6618 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\
\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:09Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.672061 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:09Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.683235 4975 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b99ea9c48665a20a382ec6898326e7a9632b41cc9069e05adcea6e9af384505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf9c453a47bb593f318201453d450cd4ae4daa241881ee03ef2449c78e23f201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xwb6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:09Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.687266 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.687310 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.687321 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.687340 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.687353 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:09Z","lastTransitionTime":"2026-01-26T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.716209 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a6731
4731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/oc
p-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:09Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.731649 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:09Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.744975 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:09Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.758863 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:09Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.781823 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:09Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.791033 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.791088 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.791105 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.791127 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.791142 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:09Z","lastTransitionTime":"2026-01-26T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.795239 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6cc951f021693f452e15865d8fed4a5318e88a5a4c778f8d9dbf6464fde061b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:08:04Z\\\",\\\"message\\\":\\\"2026-01-26T00:07:19+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c7f73d9c-f631-4e28-9643-52bdc7c6a44b\\\\n2026-01-26T00:07:19+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c7f73d9c-f631-4e28-9643-52bdc7c6a44b to /host/opt/cni/bin/\\\\n2026-01-26T00:07:19Z [verbose] multus-daemon started\\\\n2026-01-26T00:07:19Z [verbose] Readiness Indicator file check\\\\n2026-01-26T00:08:04Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:09Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.807677 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:09Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.830592 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:09Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.843136 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 
genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:09Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.855095 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:09Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.864665 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:09Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.876315 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54d1b2537eed578c14de4d951b4f369e8872f50b0b7fec3b76703438105727f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:09Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.886505 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s459q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99d35071-9f6d-45df-841f-fd49ea0550c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s459q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:09Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.893425 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.893565 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.893768 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.893861 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.893940 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:09Z","lastTransitionTime":"2026-01-26T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.996639 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.996673 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.996683 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.996698 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:09 crc kubenswrapper[4975]: I0126 00:08:09.996707 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:09Z","lastTransitionTime":"2026-01-26T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.099880 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.099919 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.099930 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.099951 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.099963 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:10Z","lastTransitionTime":"2026-01-26T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.146915 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:10 crc kubenswrapper[4975]: E0126 00:08:10.147042 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.147221 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:10 crc kubenswrapper[4975]: E0126 00:08:10.147275 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.147388 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:10 crc kubenswrapper[4975]: E0126 00:08:10.147448 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.147564 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:10 crc kubenswrapper[4975]: E0126 00:08:10.147604 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.149166 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 12:04:48.451335122 +0000 UTC Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.201920 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.201946 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.201953 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.201965 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.201975 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:10Z","lastTransitionTime":"2026-01-26T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.304620 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.304656 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.304672 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.304697 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.304714 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:10Z","lastTransitionTime":"2026-01-26T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.406605 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.406675 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.406698 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.406758 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.406778 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:10Z","lastTransitionTime":"2026-01-26T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.509266 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.509309 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.509324 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.509346 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.509362 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:10Z","lastTransitionTime":"2026-01-26T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.611899 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.611931 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.611942 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.611959 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.611972 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:10Z","lastTransitionTime":"2026-01-26T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.715018 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.715067 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.715078 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.715098 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.715110 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:10Z","lastTransitionTime":"2026-01-26T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.817569 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.817627 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.817645 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.817671 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.817688 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:10Z","lastTransitionTime":"2026-01-26T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.921230 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.921262 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.921270 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.921285 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:10 crc kubenswrapper[4975]: I0126 00:08:10.921294 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:10Z","lastTransitionTime":"2026-01-26T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.024485 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.024544 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.024564 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.024590 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.024607 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:11Z","lastTransitionTime":"2026-01-26T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.128073 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.128134 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.128152 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.128179 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.128196 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:11Z","lastTransitionTime":"2026-01-26T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.150362 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 08:39:18.711175706 +0000 UTC Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.230953 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.230991 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.231003 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.231021 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.231033 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:11Z","lastTransitionTime":"2026-01-26T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.333625 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.333697 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.333718 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.333783 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.333807 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:11Z","lastTransitionTime":"2026-01-26T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.437201 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.437255 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.437273 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.437294 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.437314 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:11Z","lastTransitionTime":"2026-01-26T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.539643 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.539694 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.539713 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.539768 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.539788 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:11Z","lastTransitionTime":"2026-01-26T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.588642 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2vrv2_3fd68329-6540-4965-a036-ddd1045f1190/ovnkube-controller/3.log" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.590040 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2vrv2_3fd68329-6540-4965-a036-ddd1045f1190/ovnkube-controller/2.log" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.593932 4975 generic.go:334] "Generic (PLEG): container finished" podID="3fd68329-6540-4965-a036-ddd1045f1190" containerID="61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4" exitCode=1 Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.593981 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerDied","Data":"61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4"} Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.594034 4975 scope.go:117] "RemoveContainer" containerID="ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.595465 4975 scope.go:117] "RemoveContainer" containerID="61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4" Jan 26 00:08:11 crc kubenswrapper[4975]: E0126 00:08:11.595834 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2vrv2_openshift-ovn-kubernetes(3fd68329-6540-4965-a036-ddd1045f1190)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" podUID="3fd68329-6540-4965-a036-ddd1045f1190" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.627906 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54d1b2537eed578c14de4d951b4f369e8872f50b0b7fec3b76703438105727f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:11Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.643489 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.643555 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:11 crc 
kubenswrapper[4975]: I0126 00:08:11.643578 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.643606 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.643629 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:11Z","lastTransitionTime":"2026-01-26T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.648084 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s459q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99d35071-9f6d-45df-841f-fd49ea0550c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s459q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:11Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.670219 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:11Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.688974 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:11Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.702610 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:11Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.726132 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:41Z\\\",\\\"message\\\":\\\"]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:2379, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:9979, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0126 00:07:41.768441 6618 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0126 00:07:41.768446 6618 services_controller.go:452] Built service openshift-etcd/etcd per-node LB for network=default: []services.LB{}\\\\nI0126 00:07:41.768462 6618 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0126 00:07:41.768463 6618 services_controller.go:453] Built service openshift-etcd/etcd template LB for network=default: []services.LB{}\\\\nI0126 00:07:41.768479 6618 services_controller.go:454] Service openshift-etcd/etcd for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0126 00:07:41.768527 6618 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:08:10Z\\\",\\\"message\\\":\\\"ing(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.176\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:1936, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0126 00:08:10.641209 7003 lb_config.go:1031] Cluster endpoints for openshift-kube-apiserver-operator/metrics for network=default are: map[]\\\\nI0126 00:08:10.641219 7003 services_controller.go:444] Built service openshift-ingress/router-internal-default LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF0126 00:08:10.641225 7003 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed 
to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to ver\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:11Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.737783 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:11Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.746227 4975 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.746268 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.746275 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.746290 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.746298 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:11Z","lastTransitionTime":"2026-01-26T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.750163 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b99ea9c48665a20a382ec6898326e7a9632b41cc9069e05adcea6e9af384505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf9c453a47bb593f318201453d450cd4ae4daa241881ee03ef2449c78e23f201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xwb6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:11Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.764041 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b81a2724-c05c-4843-afc5-30e16033f895\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a238b460df2b16ee264cabce67c7af7588ea471a73cae2dceb4ee1705ec9518d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e370bf64b79a5bbd56ecd85c893e974f4e4127131fc4af7817bd3ccc33477fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"image
ID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e370bf64b79a5bbd56ecd85c893e974f4e4127131fc4af7817bd3ccc33477fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:11Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.776088 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db50376-23cf-4cea-b849-fe725551394d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://343687e4b9df509c30607aa4077e5e9a25d8f285e0c6223fc63fa53bfa8a194b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e3b636746caa52fd748950983c7200d4c866f61d7d1f4bccce6bd0bc78f379e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\
\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54540a6b1af04f2f0bd1edade567e1983994da0fee4bb985455dc1c63be1f377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ec892414d32222aaf5d9ce150144c5ab03b998e767f864644c5307df68d4362\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ec892414d32222aaf5d9ce150144c5ab03b998e767f864644c5307df68d4362\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:11Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.794206 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:11Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.809767 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:11Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.822877 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:11Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.835196 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6cc951f021693f452e15865d8fed4a5318e88a5a4c778f8d9dbf6464fde061b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:08:04Z\\\",\\\"message\\\":\\\"2026-01-26T00:07:19+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c7f73d9c-f631-4e28-9643-52bdc7c6a44b\\\\n2026-01-26T00:07:19+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c7f73d9c-f631-4e28-9643-52bdc7c6a44b to /host/opt/cni/bin/\\\\n2026-01-26T00:07:19Z [verbose] multus-daemon started\\\\n2026-01-26T00:07:19Z [verbose] Readiness Indicator file check\\\\n2026-01-26T00:08:04Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:11Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.849762 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.849789 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.849797 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.849811 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.849819 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:11Z","lastTransitionTime":"2026-01-26T00:08:11Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.857223 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\"
:true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":fal
se,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:11Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.871302 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:11Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.888606 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:11Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.901446 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\
\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:11Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.917174 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:11Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.953673 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.953779 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.953801 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.953831 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:11 crc kubenswrapper[4975]: I0126 00:08:11.953848 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:11Z","lastTransitionTime":"2026-01-26T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.059680 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.059787 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.059812 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.059844 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.059865 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:12Z","lastTransitionTime":"2026-01-26T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.147116 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.147188 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.147180 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.147145 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:12 crc kubenswrapper[4975]: E0126 00:08:12.147327 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:12 crc kubenswrapper[4975]: E0126 00:08:12.147474 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:12 crc kubenswrapper[4975]: E0126 00:08:12.147591 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:12 crc kubenswrapper[4975]: E0126 00:08:12.147676 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.150986 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 10:37:42.986448563 +0000 UTC Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.162726 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.162801 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.162818 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.162843 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.162861 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:12Z","lastTransitionTime":"2026-01-26T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.267214 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.267263 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.267280 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.267300 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.267316 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:12Z","lastTransitionTime":"2026-01-26T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.370797 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.370867 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.370890 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.370921 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.370948 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:12Z","lastTransitionTime":"2026-01-26T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.474570 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.474643 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.474661 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.474692 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.474710 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:12Z","lastTransitionTime":"2026-01-26T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.577967 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.578005 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.578014 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.578028 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.578038 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:12Z","lastTransitionTime":"2026-01-26T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.598685 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2vrv2_3fd68329-6540-4965-a036-ddd1045f1190/ovnkube-controller/3.log" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.681792 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.681966 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.681996 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.682028 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.682052 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:12Z","lastTransitionTime":"2026-01-26T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.785150 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.785200 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.785211 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.785231 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.785244 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:12Z","lastTransitionTime":"2026-01-26T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.888978 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.889046 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.889068 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.889094 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.889110 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:12Z","lastTransitionTime":"2026-01-26T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.992414 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.992484 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.992504 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.992531 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:12 crc kubenswrapper[4975]: I0126 00:08:12.992549 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:12Z","lastTransitionTime":"2026-01-26T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.096993 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.097044 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.097061 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.097084 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.097101 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:13Z","lastTransitionTime":"2026-01-26T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.151423 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 08:24:04.608222131 +0000 UTC Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.199728 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.199811 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.199827 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.199874 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.199893 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:13Z","lastTransitionTime":"2026-01-26T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.302316 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.302353 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.302361 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.302374 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.302385 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:13Z","lastTransitionTime":"2026-01-26T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.409406 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.409806 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.409893 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.409945 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.409974 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:13Z","lastTransitionTime":"2026-01-26T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.513120 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.513189 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.513211 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.513235 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.513252 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:13Z","lastTransitionTime":"2026-01-26T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.617260 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.617315 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.617333 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.617359 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.617377 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:13Z","lastTransitionTime":"2026-01-26T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.720967 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.721099 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.721117 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.721142 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.721160 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:13Z","lastTransitionTime":"2026-01-26T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.824373 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.824471 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.824492 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.824516 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.824534 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:13Z","lastTransitionTime":"2026-01-26T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.927518 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.927566 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.927576 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.927593 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:13 crc kubenswrapper[4975]: I0126 00:08:13.927602 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:13Z","lastTransitionTime":"2026-01-26T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.030962 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.031040 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.031060 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.031091 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.031128 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:14Z","lastTransitionTime":"2026-01-26T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.134178 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.134253 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.134277 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.134309 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.134332 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:14Z","lastTransitionTime":"2026-01-26T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.147191 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.147300 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.147241 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.147206 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:14 crc kubenswrapper[4975]: E0126 00:08:14.147411 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:14 crc kubenswrapper[4975]: E0126 00:08:14.147527 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:14 crc kubenswrapper[4975]: E0126 00:08:14.147627 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:14 crc kubenswrapper[4975]: E0126 00:08:14.147769 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.151610 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 16:19:42.489482689 +0000 UTC Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.237541 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.237569 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.237579 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.237596 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.237606 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:14Z","lastTransitionTime":"2026-01-26T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.340949 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.341009 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.341028 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.341050 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.341070 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:14Z","lastTransitionTime":"2026-01-26T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.444780 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.444858 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.444882 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.444913 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.444934 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:14Z","lastTransitionTime":"2026-01-26T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.548356 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.548438 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.548462 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.548485 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.548498 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:14Z","lastTransitionTime":"2026-01-26T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.651655 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.651719 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.651762 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.651788 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.651805 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:14Z","lastTransitionTime":"2026-01-26T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.754932 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.754985 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.755001 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.755024 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.755044 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:14Z","lastTransitionTime":"2026-01-26T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.858799 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.858913 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.858934 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.858998 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.859017 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:14Z","lastTransitionTime":"2026-01-26T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.962488 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.962559 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.962581 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.962608 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:14 crc kubenswrapper[4975]: I0126 00:08:14.962625 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:14Z","lastTransitionTime":"2026-01-26T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.065500 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.065560 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.065579 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.065607 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.065626 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:15Z","lastTransitionTime":"2026-01-26T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.152295 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 21:01:12.72397511 +0000 UTC Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.168157 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.168207 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.168219 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.168239 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.168251 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:15Z","lastTransitionTime":"2026-01-26T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.270800 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.270874 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.270892 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.270917 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.270936 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:15Z","lastTransitionTime":"2026-01-26T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.373691 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.373779 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.373797 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.373822 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.373837 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:15Z","lastTransitionTime":"2026-01-26T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.477544 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.477600 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.477795 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.477826 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.477848 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:15Z","lastTransitionTime":"2026-01-26T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.581495 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.581542 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.581551 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.581566 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.581575 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:15Z","lastTransitionTime":"2026-01-26T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.683966 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.684034 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.684056 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.684084 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.684106 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:15Z","lastTransitionTime":"2026-01-26T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.788082 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.788121 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.788132 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.788167 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.788180 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:15Z","lastTransitionTime":"2026-01-26T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.890836 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.890924 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.890994 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.891024 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:15 crc kubenswrapper[4975]: I0126 00:08:15.891037 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:15Z","lastTransitionTime":"2026-01-26T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:15.993572 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:15.993656 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:15.993674 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:15.993725 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:15.993776 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:15Z","lastTransitionTime":"2026-01-26T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.096686 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.096756 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.096771 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.096793 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.096805 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:16Z","lastTransitionTime":"2026-01-26T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.146770 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:16 crc kubenswrapper[4975]: E0126 00:08:16.146945 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.146982 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.147111 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.147651 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:16 crc kubenswrapper[4975]: E0126 00:08:16.148008 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:16 crc kubenswrapper[4975]: E0126 00:08:16.147898 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:16 crc kubenswrapper[4975]: E0126 00:08:16.147650 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.153118 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 11:22:41.250804588 +0000 UTC Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.166897 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.188220 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.200645 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.200868 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.200953 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.201035 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.201110 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:16Z","lastTransitionTime":"2026-01-26T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.202229 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.214366 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.234446 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54d1b2537eed578c14de4d951b4f369e8872f50b0b7fec3b76703438105727f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.248119 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s459q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99d35071-9f6d-45df-841f-fd49ea0550c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s459q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.263879 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.277026 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db50376-23cf-4cea-b849-fe725551394d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://343687e4b9df509c30607aa4077e5e9a25d8f285e0c6223fc63fa53bfa8a194b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e3b636746caa52fd748950983c7200d4c866f61d7d1f4bccce6bd0bc78f379e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54540a6b1af04f2f0bd1edade567e1983994da0fee4bb985455dc1c63be1f377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ec892414d32222aaf5d9ce150144c5ab03b998e767f864644c5307df68d4362\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ec892414d32222aaf5d9ce150144c5ab03b998e767f864644c5307df68d4362\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.289202 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod 
\"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.303838 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.303953 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.304046 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.304128 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.304214 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:16Z","lastTransitionTime":"2026-01-26T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.306329 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61556d840942a6676aa1278d24987707bd21a3cc
1552d7f4e77d0f80d378e4f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:41Z\\\",\\\"message\\\":\\\"]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:2379, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:9979, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0126 00:07:41.768441 6618 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0126 00:07:41.768446 6618 services_controller.go:452] Built service openshift-etcd/etcd per-node LB for network=default: []services.LB{}\\\\nI0126 00:07:41.768462 6618 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0126 00:07:41.768463 6618 services_controller.go:453] Built service openshift-etcd/etcd template LB for network=default: []services.LB{}\\\\nI0126 00:07:41.768479 6618 services_controller.go:454] Service openshift-etcd/etcd for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0126 00:07:41.768527 6618 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:08:10Z\\\",\\\"message\\\":\\\"ing(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.176\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:1936, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0126 00:08:10.641209 7003 lb_config.go:1031] Cluster endpoints for openshift-kube-apiserver-operator/metrics for network=default are: map[]\\\\nI0126 00:08:10.641219 7003 services_controller.go:444] Built service openshift-ingress/router-internal-default LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF0126 00:08:10.641225 7003 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: 
failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to ver\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"19
2.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.317605 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.373876 4975 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.374100 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.374244 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.374363 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.374479 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:16Z","lastTransitionTime":"2026-01-26T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.376098 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b99ea9c48665a20a382ec6898326e7a9632b41cc9069e05adcea6e9af384505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf9c453a47bb593f318201453d450cd4ae4daa241881ee03ef2449c78e23f201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xwb6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.386160 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b81a2724-c05c-4843-afc5-30e16033f895\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a238b460df2b16ee264cabce67c7af7588ea471a73cae2dceb4ee1705ec9518d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e370bf64b79a5bbd56ecd85c893e974f4e4127131fc4af7817bd3ccc33477fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"image
ID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e370bf64b79a5bbd56ecd85c893e974f4e4127131fc4af7817bd3ccc33477fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:16 crc kubenswrapper[4975]: E0126 00:08:16.387566 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.395105 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.395156 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.395170 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.395193 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.395208 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:16Z","lastTransitionTime":"2026-01-26T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.400156 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:16 crc kubenswrapper[4975]: E0126 00:08:16.406348 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.410380 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.410432 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.410441 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.410459 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.410468 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:16Z","lastTransitionTime":"2026-01-26T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.417563 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:16 crc kubenswrapper[4975]: E0126 00:08:16.422462 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"5
3ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.426679 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.426715 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.426751 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.426779 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.426797 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:16Z","lastTransitionTime":"2026-01-26T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.433645 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:16 crc kubenswrapper[4975]: E0126 00:08:16.442182 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.446049 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.446109 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.446125 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.446145 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.446158 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:16Z","lastTransitionTime":"2026-01-26T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.446800 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"o
vnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:16 crc kubenswrapper[4975]: E0126 00:08:16.458466 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:16 crc kubenswrapper[4975]: E0126 00:08:16.458581 4975 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.460690 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.460747 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.460761 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.460778 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.460791 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:16Z","lastTransitionTime":"2026-01-26T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.461200 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6cc951f021693f452e15865d8fed4a5318e88a5a4c778f8d9dbf6464fde061b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:08:04Z\\\",\\\"message\\\":\\\"2026-01-26T00:07:19+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c7f73d9c-f631-4e28-9643-52bdc7c6a44b\\\\n2026-01-26T00:07:19+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c7f73d9c-f631-4e28-9643-52bdc7c6a44b to /host/opt/cni/bin/\\\\n2026-01-26T00:07:19Z [verbose] multus-daemon started\\\\n2026-01-26T00:07:19Z [verbose] Readiness Indicator file check\\\\n2026-01-26T00:08:04Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.479600 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2ed
ef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:16Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.565310 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.565370 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.565382 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.565399 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.565409 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:16Z","lastTransitionTime":"2026-01-26T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.669146 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.669492 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.669503 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.669519 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.669530 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:16Z","lastTransitionTime":"2026-01-26T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.772600 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.772650 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.772661 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.772680 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.772692 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:16Z","lastTransitionTime":"2026-01-26T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.876672 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.876751 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.876762 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.876777 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.876788 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:16Z","lastTransitionTime":"2026-01-26T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.978767 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.978810 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.978819 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.978839 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:16 crc kubenswrapper[4975]: I0126 00:08:16.978850 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:16Z","lastTransitionTime":"2026-01-26T00:08:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.091164 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.091209 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.091221 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.091236 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.091246 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:17Z","lastTransitionTime":"2026-01-26T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.154324 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 07:50:59.526272615 +0000 UTC Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.193399 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.193438 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.193447 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.193463 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.193473 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:17Z","lastTransitionTime":"2026-01-26T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.295869 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.295906 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.295914 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.295928 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.295938 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:17Z","lastTransitionTime":"2026-01-26T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.398468 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.398521 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.398533 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.398552 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.398566 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:17Z","lastTransitionTime":"2026-01-26T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.501688 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.501744 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.501754 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.501767 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.501776 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:17Z","lastTransitionTime":"2026-01-26T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.604547 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.604582 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.604590 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.604606 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.604615 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:17Z","lastTransitionTime":"2026-01-26T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.707156 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.707198 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.707210 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.707228 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.707240 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:17Z","lastTransitionTime":"2026-01-26T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.809501 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.809547 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.809560 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.809843 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.809862 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:17Z","lastTransitionTime":"2026-01-26T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.912300 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.912350 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.912373 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.912399 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:17 crc kubenswrapper[4975]: I0126 00:08:17.912414 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:17Z","lastTransitionTime":"2026-01-26T00:08:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.014850 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.014887 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.014895 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.014911 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.014920 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:18Z","lastTransitionTime":"2026-01-26T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.047156 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:08:18 crc kubenswrapper[4975]: E0126 00:08:18.047255 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:22.047234852 +0000 UTC m=+146.168440356 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.047291 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.047325 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:18 crc kubenswrapper[4975]: E0126 00:08:18.047428 4975 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 00:08:18 crc kubenswrapper[4975]: E0126 00:08:18.047453 4975 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 00:08:18 crc kubenswrapper[4975]: E0126 00:08:18.047482 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 00:09:22.047471209 +0000 UTC m=+146.168676703 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 26 00:08:18 crc kubenswrapper[4975]: E0126 00:08:18.047498 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-26 00:09:22.04749084 +0000 UTC m=+146.168696334 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.117805 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.117849 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.117858 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.117873 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.117882 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:18Z","lastTransitionTime":"2026-01-26T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.147088 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.147169 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.147292 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:18 crc kubenswrapper[4975]: E0126 00:08:18.147414 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:18 crc kubenswrapper[4975]: E0126 00:08:18.147659 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.147796 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.147842 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:18 crc kubenswrapper[4975]: E0126 00:08:18.147847 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.147898 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:18 crc kubenswrapper[4975]: E0126 00:08:18.148025 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 00:08:18 crc kubenswrapper[4975]: E0126 00:08:18.148046 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 00:08:18 crc kubenswrapper[4975]: E0126 00:08:18.148057 4975 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:08:18 crc kubenswrapper[4975]: E0126 00:08:18.148098 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 26 00:08:18 crc kubenswrapper[4975]: E0126 00:08:18.148135 4975 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 26 00:08:18 crc kubenswrapper[4975]: E0126 00:08:18.148159 4975 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:08:18 crc kubenswrapper[4975]: E0126 00:08:18.148106 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-26 00:09:22.148093144 +0000 UTC m=+146.269298638 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:08:18 crc kubenswrapper[4975]: E0126 00:08:18.148279 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-26 00:09:22.148246928 +0000 UTC m=+146.269452452 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 26 00:08:18 crc kubenswrapper[4975]: E0126 00:08:18.148326 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.155089 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 19:10:14.397443985 +0000 UTC Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.220391 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.220447 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.220464 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.220490 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.220512 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:18Z","lastTransitionTime":"2026-01-26T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.322926 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.322985 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.323002 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.323022 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.323040 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:18Z","lastTransitionTime":"2026-01-26T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.425984 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.426017 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.426025 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.426037 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.426045 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:18Z","lastTransitionTime":"2026-01-26T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.529294 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.529356 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.529380 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.529414 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.529440 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:18Z","lastTransitionTime":"2026-01-26T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.632244 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.632287 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.632305 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.632324 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.632337 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:18Z","lastTransitionTime":"2026-01-26T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.735394 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.735440 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.735456 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.735478 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.735493 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:18Z","lastTransitionTime":"2026-01-26T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.838292 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.838365 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.838383 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.838409 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.838431 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:18Z","lastTransitionTime":"2026-01-26T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.941719 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.941822 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.941845 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.941876 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:18 crc kubenswrapper[4975]: I0126 00:08:18.941897 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:18Z","lastTransitionTime":"2026-01-26T00:08:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.045171 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.045232 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.045251 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.045277 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.045294 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:19Z","lastTransitionTime":"2026-01-26T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.148372 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.148436 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.148459 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.148490 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.148511 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:19Z","lastTransitionTime":"2026-01-26T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.155999 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 07:33:03.229025468 +0000 UTC Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.252028 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.252089 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.252109 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.252136 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.252156 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:19Z","lastTransitionTime":"2026-01-26T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.355478 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.355525 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.355544 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.355567 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.355584 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:19Z","lastTransitionTime":"2026-01-26T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.458873 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.458946 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.458972 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.459003 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.459027 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:19Z","lastTransitionTime":"2026-01-26T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.562220 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.562287 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.562310 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.562340 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.562361 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:19Z","lastTransitionTime":"2026-01-26T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.664782 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.664846 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.664863 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.664888 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.664909 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:19Z","lastTransitionTime":"2026-01-26T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.767911 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.767979 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.767995 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.768020 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.768038 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:19Z","lastTransitionTime":"2026-01-26T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.871356 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.871421 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.871443 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.871468 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.871484 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:19Z","lastTransitionTime":"2026-01-26T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.974546 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.974838 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.975010 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.975164 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:19 crc kubenswrapper[4975]: I0126 00:08:19.975300 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:19Z","lastTransitionTime":"2026-01-26T00:08:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.079159 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.079205 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.079223 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.079245 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.079261 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:20Z","lastTransitionTime":"2026-01-26T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.147525 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.147674 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:20 crc kubenswrapper[4975]: E0126 00:08:20.147813 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.147835 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.148024 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:20 crc kubenswrapper[4975]: E0126 00:08:20.148134 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:20 crc kubenswrapper[4975]: E0126 00:08:20.148056 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:20 crc kubenswrapper[4975]: E0126 00:08:20.148462 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.157031 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 17:07:22.258314346 +0000 UTC Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.182391 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.182459 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.182478 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.182502 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.182519 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:20Z","lastTransitionTime":"2026-01-26T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.286218 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.286274 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.286293 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.286318 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.286335 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:20Z","lastTransitionTime":"2026-01-26T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.389503 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.389568 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.389597 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.389631 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.389655 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:20Z","lastTransitionTime":"2026-01-26T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.493099 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.493153 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.493170 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.493196 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.493215 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:20Z","lastTransitionTime":"2026-01-26T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.596330 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.596392 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.596409 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.596432 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.596451 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:20Z","lastTransitionTime":"2026-01-26T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.699306 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.699342 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.699354 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.699372 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.699384 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:20Z","lastTransitionTime":"2026-01-26T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.801495 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.801540 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.801554 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.801575 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.801590 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:20Z","lastTransitionTime":"2026-01-26T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.904202 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.904238 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.904246 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.904258 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:20 crc kubenswrapper[4975]: I0126 00:08:20.904270 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:20Z","lastTransitionTime":"2026-01-26T00:08:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.007240 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.007310 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.007338 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.007368 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.007387 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:21Z","lastTransitionTime":"2026-01-26T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.110320 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.110378 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.110392 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.110412 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.110427 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:21Z","lastTransitionTime":"2026-01-26T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.158536 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 06:58:18.093703516 +0000 UTC Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.213158 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.213206 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.213221 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.213237 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.213250 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:21Z","lastTransitionTime":"2026-01-26T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.315256 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.315320 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.315336 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.315361 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.315379 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:21Z","lastTransitionTime":"2026-01-26T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.418810 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.418883 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.418896 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.418912 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.418924 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:21Z","lastTransitionTime":"2026-01-26T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.522052 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.522140 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.522169 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.522192 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.522209 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:21Z","lastTransitionTime":"2026-01-26T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.624866 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.624929 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.624949 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.624976 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.624997 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:21Z","lastTransitionTime":"2026-01-26T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.728388 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.728442 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.728462 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.728486 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.728504 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:21Z","lastTransitionTime":"2026-01-26T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.831284 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.831327 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.831339 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.831355 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.831367 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:21Z","lastTransitionTime":"2026-01-26T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.934059 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.934112 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.934129 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.934151 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:21 crc kubenswrapper[4975]: I0126 00:08:21.934169 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:21Z","lastTransitionTime":"2026-01-26T00:08:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.036986 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.037028 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.037040 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.037057 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.037069 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:22Z","lastTransitionTime":"2026-01-26T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.140037 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.140083 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.140096 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.140112 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.140124 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:22Z","lastTransitionTime":"2026-01-26T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.146477 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.146578 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.146508 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:22 crc kubenswrapper[4975]: E0126 00:08:22.146690 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.146945 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:22 crc kubenswrapper[4975]: E0126 00:08:22.146938 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:22 crc kubenswrapper[4975]: E0126 00:08:22.147077 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:22 crc kubenswrapper[4975]: E0126 00:08:22.147293 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.159431 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 04:27:31.884669581 +0000 UTC Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.243090 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.243126 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.243136 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.243153 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.243165 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:22Z","lastTransitionTime":"2026-01-26T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.346225 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.346513 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.346525 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.346541 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.346553 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:22Z","lastTransitionTime":"2026-01-26T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.448871 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.449029 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.449047 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.449070 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.449083 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:22Z","lastTransitionTime":"2026-01-26T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.551567 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.551613 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.551624 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.551643 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.551655 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:22Z","lastTransitionTime":"2026-01-26T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.654784 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.654826 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.654841 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.654861 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.654873 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:22Z","lastTransitionTime":"2026-01-26T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.758187 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.758267 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.758299 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.758336 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.758354 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:22Z","lastTransitionTime":"2026-01-26T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.860627 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.860661 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.860672 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.860685 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.860693 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:22Z","lastTransitionTime":"2026-01-26T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.964092 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.964194 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.964216 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.964241 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:22 crc kubenswrapper[4975]: I0126 00:08:22.964263 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:22Z","lastTransitionTime":"2026-01-26T00:08:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.066624 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.066699 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.066726 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.066792 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.066817 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:23Z","lastTransitionTime":"2026-01-26T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.159802 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 22:11:07.888441314 +0000 UTC Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.169725 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.169783 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.169792 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.169804 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.169812 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:23Z","lastTransitionTime":"2026-01-26T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.272443 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.272524 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.272543 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.272568 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.272589 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:23Z","lastTransitionTime":"2026-01-26T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.374937 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.374984 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.374999 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.375020 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.375036 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:23Z","lastTransitionTime":"2026-01-26T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.477848 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.477912 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.477928 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.477952 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.477973 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:23Z","lastTransitionTime":"2026-01-26T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.581232 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.581306 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.581327 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.581358 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.581380 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:23Z","lastTransitionTime":"2026-01-26T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.684308 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.684385 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.684403 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.684425 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.684444 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:23Z","lastTransitionTime":"2026-01-26T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.787791 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.787866 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.787883 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.787914 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.787931 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:23Z","lastTransitionTime":"2026-01-26T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.891151 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.891239 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.891258 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.891281 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.891295 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:23Z","lastTransitionTime":"2026-01-26T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.994426 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.994518 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.994542 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.994624 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:23 crc kubenswrapper[4975]: I0126 00:08:23.994644 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:23Z","lastTransitionTime":"2026-01-26T00:08:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.097697 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.097808 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.097836 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.097867 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.097889 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:24Z","lastTransitionTime":"2026-01-26T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.147959 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.147996 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:24 crc kubenswrapper[4975]: E0126 00:08:24.148120 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.148173 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.148207 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:24 crc kubenswrapper[4975]: E0126 00:08:24.148307 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:24 crc kubenswrapper[4975]: E0126 00:08:24.148362 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:24 crc kubenswrapper[4975]: E0126 00:08:24.148465 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.160890 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-16 11:43:50.442975985 +0000 UTC Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.201146 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.201206 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.201224 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.201251 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.201272 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:24Z","lastTransitionTime":"2026-01-26T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.304353 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.304423 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.304450 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.304482 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.304507 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:24Z","lastTransitionTime":"2026-01-26T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.407848 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.407902 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.407918 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.407941 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.407962 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:24Z","lastTransitionTime":"2026-01-26T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.510703 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.510793 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.510811 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.510835 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.510852 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:24Z","lastTransitionTime":"2026-01-26T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.613494 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.613561 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.613578 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.613603 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.613621 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:24Z","lastTransitionTime":"2026-01-26T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.716312 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.716372 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.716392 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.716423 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.716443 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:24Z","lastTransitionTime":"2026-01-26T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.820210 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.820272 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.820293 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.820319 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.820336 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:24Z","lastTransitionTime":"2026-01-26T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.923201 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.923285 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.923307 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.923337 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:24 crc kubenswrapper[4975]: I0126 00:08:24.923359 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:24Z","lastTransitionTime":"2026-01-26T00:08:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.027106 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.027158 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.027167 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.027182 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.027190 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:25Z","lastTransitionTime":"2026-01-26T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.130483 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.130539 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.130555 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.130578 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.130594 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:25Z","lastTransitionTime":"2026-01-26T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.161499 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 12:46:46.355913054 +0000 UTC Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.233389 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.233455 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.233474 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.233499 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.233519 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:25Z","lastTransitionTime":"2026-01-26T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.337059 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.337115 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.337138 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.337168 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.337187 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:25Z","lastTransitionTime":"2026-01-26T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.440255 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.440335 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.440360 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.440396 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.440418 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:25Z","lastTransitionTime":"2026-01-26T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.543696 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.543807 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.543833 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.543859 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.543876 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:25Z","lastTransitionTime":"2026-01-26T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.647034 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.647086 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.647103 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.647129 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.647146 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:25Z","lastTransitionTime":"2026-01-26T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.750303 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.750359 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.750376 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.750399 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.750417 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:25Z","lastTransitionTime":"2026-01-26T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.853339 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.853397 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.853416 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.853444 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.853463 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:25Z","lastTransitionTime":"2026-01-26T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.957316 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.957391 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.957415 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.957452 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:25 crc kubenswrapper[4975]: I0126 00:08:25.957470 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:25Z","lastTransitionTime":"2026-01-26T00:08:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.061155 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.061213 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.061230 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.061254 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.061270 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:26Z","lastTransitionTime":"2026-01-26T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.147234 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.147349 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:26 crc kubenswrapper[4975]: E0126 00:08:26.147403 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:26 crc kubenswrapper[4975]: E0126 00:08:26.147577 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.147660 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.147675 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:26 crc kubenswrapper[4975]: E0126 00:08:26.147817 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:26 crc kubenswrapper[4975]: E0126 00:08:26.147961 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.162048 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 21:40:06.183415268 +0000 UTC Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.164363 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.164399 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.164415 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.164439 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.164456 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:26Z","lastTransitionTime":"2026-01-26T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.167196 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.202102 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ebf6832f57f2542a44eb72a0e448e8613417224c62d186f7bcc6c42e6f7b8946\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:07:41Z\\\",\\\"message\\\":\\\"]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:2379, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}, services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.253\\\\\\\", Port:9979, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0126 00:07:41.768441 6618 services_controller.go:444] Built service openshift-network-console/networking-console-plugin LB per-node configs for network=default: []services.lbConfig(nil)\\\\nI0126 00:07:41.768446 6618 services_controller.go:452] Built service openshift-etcd/etcd per-node LB for network=default: []services.LB{}\\\\nI0126 00:07:41.768462 6618 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0126 00:07:41.768463 6618 services_controller.go:453] Built service openshift-etcd/etcd template LB for network=default: []services.LB{}\\\\nI0126 00:07:41.768479 6618 services_controller.go:454] Service openshift-etcd/etcd for network=default has 2 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF0126 00:07:41.768527 6618 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:40Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:08:10Z\\\",\\\"message\\\":\\\"ing(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, 
internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.176\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:1936, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0126 00:08:10.641209 7003 lb_config.go:1031] Cluster endpoints for openshift-kube-apiserver-operator/metrics for network=default are: map[]\\\\nI0126 00:08:10.641219 7003 services_controller.go:444] Built service openshift-ingress/router-internal-default LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF0126 00:08:10.641225 7003 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to ver\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:08:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.221080 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.240283 4975 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b99ea9c48665a20a382ec6898326e7a9632b41cc9069e05adcea6e9af384505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf9c453a47bb593f318201453d450cd4ae4daa241881ee03ef2449c78e23f201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xwb6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.258489 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b81a2724-c05c-4843-afc5-30e16033f895\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a238b460df2b16ee264cabce67c7af7588ea471a73cae2dceb4ee1705ec9518d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e370bf64b79a5bbd56ecd85c893e974f4e4127131fc4af7817bd3ccc33477fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e370bf64b79a5bbd56ecd85c893e974f4e4127131fc4af7817bd3ccc33477fae\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:26Z is after 
2025-08-24T17:21:41Z" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.267383 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.267546 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.267574 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.267605 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.267641 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:26Z","lastTransitionTime":"2026-01-26T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.277387 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db50376-23cf-4cea-b849-fe725551394d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://343687e4b9df509c30607aa4077e5e9a25d8f285e0c6223fc63fa53bfa8a194b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e3b636746caa52fd748950983c7200d4c866f61d7d1f4bccce6bd0bc78f379e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-synce
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54540a6b1af04f2f0bd1edade567e1983994da0fee4bb985455dc1c63be1f377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ec892414d32222aaf5d9ce150144c5ab03b998e767f864644c5307df68d4362\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ec892414d32222aaf5d9ce150144c5ab03b998e767f864644c5307df68d4362\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.298728 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.317871 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.338983 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.361652 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6cc951f021693f452e15865d8fed4a5318e88a5a4c778f8d9dbf6464fde061b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:08:04Z\\\",\\\"message\\\":\\\"2026-01-26T00:07:19+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c7f73d9c-f631-4e28-9643-52bdc7c6a44b\\\\n2026-01-26T00:07:19+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c7f73d9c-f631-4e28-9643-52bdc7c6a44b to /host/opt/cni/bin/\\\\n2026-01-26T00:07:19Z [verbose] multus-daemon started\\\\n2026-01-26T00:07:19Z [verbose] Readiness Indicator file check\\\\n2026-01-26T00:08:04Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.370124 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.370168 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.370184 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.370205 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.370220 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:26Z","lastTransitionTime":"2026-01-26T00:08:26Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.394657 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\"
:true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2edef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":fal
se,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.416043 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.437629 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.456444 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.472779 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.473547 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.473609 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.473628 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.473658 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.473676 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:26Z","lastTransitionTime":"2026-01-26T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.496133 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54d1b2537eed578c14de4d951b4f369e8872f50b0b7fec3b76703438105727f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/hos
t/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2
19b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.513205 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s459q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"99d35071-9f6d-45df-841f-fd49ea0550c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s459q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.535578 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.554947 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.576200 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.576260 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.576278 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.576304 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.576331 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:26Z","lastTransitionTime":"2026-01-26T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.685097 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.685167 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.685188 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.685213 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.685230 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:26Z","lastTransitionTime":"2026-01-26T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.787956 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.788018 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.788040 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.788068 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.788089 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:26Z","lastTransitionTime":"2026-01-26T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.822184 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.822237 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.822261 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.822286 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.822306 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:26Z","lastTransitionTime":"2026-01-26T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:26 crc kubenswrapper[4975]: E0126 00:08:26.842579 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.847578 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.847643 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.847668 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.847698 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.847718 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:26Z","lastTransitionTime":"2026-01-26T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:26 crc kubenswrapper[4975]: E0126 00:08:26.868935 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.873254 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.873291 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.873302 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.873317 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.873329 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:26Z","lastTransitionTime":"2026-01-26T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:26 crc kubenswrapper[4975]: E0126 00:08:26.885783 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.891208 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.891273 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.891288 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.891304 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.891314 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:26Z","lastTransitionTime":"2026-01-26T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:26 crc kubenswrapper[4975]: E0126 00:08:26.908296 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.912630 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.912661 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.912671 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.912683 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.912706 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:26Z","lastTransitionTime":"2026-01-26T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:26 crc kubenswrapper[4975]: E0126 00:08:26.932919 4975 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f710b43a-30a9-4ff9-8d9b-11cb2688597c\\\",\\\"systemUUID\\\":\\\"53ded227-eb06-4731-8131-8760124d118b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:26Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:26 crc kubenswrapper[4975]: E0126 00:08:26.933049 4975 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.934856 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.934918 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.934938 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.934961 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:26 crc kubenswrapper[4975]: I0126 00:08:26.934979 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:26Z","lastTransitionTime":"2026-01-26T00:08:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.037868 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.037931 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.037952 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.037978 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.037998 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:27Z","lastTransitionTime":"2026-01-26T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.141235 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.141313 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.141334 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.141362 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.141383 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:27Z","lastTransitionTime":"2026-01-26T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.147053 4975 scope.go:117] "RemoveContainer" containerID="61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4" Jan 26 00:08:27 crc kubenswrapper[4975]: E0126 00:08:27.147207 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2vrv2_openshift-ovn-kubernetes(3fd68329-6540-4965-a036-ddd1045f1190)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" podUID="3fd68329-6540-4965-a036-ddd1045f1190" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.162471 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 05:11:11.30425354 +0000 UTC Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.169255 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d260a77f-a97d-4771-92cd-ebd476f99134\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI0126 00:07:08.527878 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0126 00:07:08.528785 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1863418295/tls.crt::/tmp/serving-cert-1863418295/tls.key\\\\\\\"\\\\nI0126 00:07:13.993766 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0126 00:07:13.997068 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0126 00:07:13.997161 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0126 00:07:13.997267 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0126 00:07:13.997518 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0126 00:07:14.007187 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0126 00:07:14.007240 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007255 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0126 00:07:14.007268 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0126 00:07:14.007276 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0126 00:07:14.007287 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0126 00:07:14.007294 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0126 00:07:14.007330 1 genericapiserver.go:533] MuxAndDiscoveryComplete has 
all endpoints registered and discovery information is complete\\\\nF0126 00:07:14.007850 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.189318 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8124a11398700f3dd27ecc1b05d6cae71688f9da04f2fcbd074d479180714798\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.204829 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-ckf4p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66a9d0dc-feb7-4b88-a6a6-d0ceb9bfb0f7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://547e6462c4c8eee6da199e489c309ae2b55d1d73c829c23a7f184e9d4b5d5369\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m5xfk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-ckf4p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.221069 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45c4e01d-bac9-49dd-9be1-bd759f38f2a8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://54d1b2537eed578c14de4d951b4f369e8872f50b0b7fec3b76703438105727f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2476fc9ef16d0362284c1c621f5b328d78115317cf5ef9aecef5b0da6febdc06\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2526f258681a66a1fdf45e236faac0f15abdd469cced5188c3cd5de98adf5a18\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:19Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0a11cf42d62a8e8e4f848374c233617c9b5e1afbefff1ae9e73895aaf7dd006\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:20Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8ce7cc61903889160f00663e75e8902aff8d9ec6edd8140e0a94f4a751129b9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:21Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b09041684113b6b403ad5717773c4111b40d56496abf04f42c536c0be508c2d9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://219b86e15409838960b287f2abeb4884816d7e3e0773f3c1bb68dac758989d3b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wskkd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jpmlj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.232645 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-s459q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"99d35071-9f6d-45df-841f-fd49ea0550c1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hrfhx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:30Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-s459q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.245237 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.245288 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.245299 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.245317 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.245328 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:27Z","lastTransitionTime":"2026-01-26T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.250192 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ae52bf4e-67f1-480b-af6d-2d1d2ce37e01\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b99ea9c48665a20a382ec6898326e7a9632b41cc9069e05adcea6e9af384505\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf9c453a47bb593f318201453d450cd4ae4daa241881ee03ef2449c78e23f201\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\
\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zhjbn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:29Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-xwb6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.265983 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b81a2724-c05c-4843-afc5-30e16033f895\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a238b460df2b16ee264cabce67c7af7588ea471a73cae2dceb4ee1705ec9518d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e370bf64b79a5bbd56ecd85c893e974f4e4127131fc4af7817bd3ccc33477fae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e370bf64b79a5bbd56ecd85c893e974f4e4127131fc4af7817bd3ccc33477fae\\\",\\\"exitCo
de\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.282960 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7db50376-23cf-4cea-b849-fe725551394d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://343687e4b9df509c30607aa4077e5e9a25d8f285e0c6223fc63fa53bfa8a194b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3e3b636746caa52fd748950983c7200d4c866f61d7d1f4bccce6bd0bc78f379e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://54540a6b1af0
4f2f0bd1edade567e1983994da0fee4bb985455dc1c63be1f377\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1ec892414d32222aaf5d9ce150144c5ab03b998e767f864644c5307df68d4362\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1ec892414d32222aaf5d9ce150144c5ab03b998e767f864644c5307df68d4362\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.295970 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vcvtm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e64c045-dfa3-4706-8600-03600ca4980c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://20759d23d0f710931f20bb44bed1f7cc8e75fcbe718bff9d99ddf93c67540d88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dt2sz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:15Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vcvtm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.329623 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fd68329-6540-4965-a036-ddd1045f1190\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:08:10Z\\\",\\\"message\\\":\\\"ing(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}, services.lbConfig{vips:[]string{\\\\\\\"10.217.4.176\\\\\\\"}, protocol:\\\\\\\"TCP\\\\\\\", inport:1936, clusterEndpoints:services.lbEndpoints{Port:0, V4IPs:[]string(nil), V6IPs:[]string(nil)}, nodeEndpoints:map[string]services.lbEndpoints{}, externalTrafficLocal:false, internalTrafficLocal:false, hasNodePort:false}}\\\\nI0126 00:08:10.641209 7003 lb_config.go:1031] Cluster endpoints for openshift-kube-apiserver-operator/metrics for network=default are: map[]\\\\nI0126 00:08:10.641219 7003 services_controller.go:444] Built service openshift-ingress/router-internal-default LB per-node configs for network=default: []services.lbConfig(nil)\\\\nF0126 00:08:10.641225 7003 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to ver\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:08:09Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2vrv2_openshift-ovn-kubernetes(3fd68329-6540-4965-a036-ddd1045f1190)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rmrkv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2vrv2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.345716 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b76c31fb-14ea-4b49-8a41-0b2731967b86\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d8e09b062a38d0a8cf065855174f8627754b935f93aabd8a75cb5c400828983\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7qhgz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-f42fk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.347272 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.347347 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.347372 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.347403 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.347439 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:27Z","lastTransitionTime":"2026-01-26T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.365398 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bcsb4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7d3cba21-428c-4151-bb16-f3478d54c90e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:08:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6cc951f021693f452e15865d8fed4a5318e88a5a4c778f8d9dbf6464fde061b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-26T00:08:04Z\\\",\\\"message\\\":\\\"2026-01-26T00:07:19+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c7f73d9c-f631-4e28-9643-52bdc7c6a44b\\\\n2026-01-26T00:07:19+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c7f73d9c-f631-4e28-9643-52bdc7c6a44b to /host/opt/cni/bin/\\\\n2026-01-26T00:07:19Z [verbose] multus-daemon started\\\\n2026-01-26T00:07:19Z [verbose] Readiness Indicator file check\\\\n2026-01-26T00:08:04Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-26T00:07:16Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:08:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-m9dbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:07:16Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bcsb4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.399217 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a6455a9b-854a-4342-9dd4-730dbfd89b15\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5a1b22e4d9f7aabaeb0ddf209c3f8983c733e19d8f4e3b40e0e1ae6a674a5c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ae28404ef86a453b9657b65252ebe91186b40a2d0c342c8d656a026602c0c8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acad9b1bf0136c0e31ad7cce8d2bcb6bb8fc9707083c01f40939e7f66b3508a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34f4a876adfc8a72e22c0ffccec5a700b72d2ed
ef10690262c0c84ac69b21611\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c4d03bc6f031d48c7ca2430da07e68bc4b5d85f0c28a3ddc0cd193f8ebb4c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c16305d0810abf7e4a644d7543fa5f174ed9962b7f0144d9d34e5524213a3cc\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a936b7ff4c25a63a3ef9b6b5fc1a83ed35b8863141e9eea33740870216b69e6a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://de42b16f197ff5b0ac06401a620d795745b49cc2d98da2642ca0cd348d7b3c2d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-26T00:06:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.418624 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c6d5be1d316b668fff6b584ab88d664b6eb20b546a360d1d657725c613011180\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.438589 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.451474 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.451870 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.452063 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.452222 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.452362 4975 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:27Z","lastTransitionTime":"2026-01-26T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.461635 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.477012 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:15Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://01d4e386461b657386859bce38cfb3128ad2cb418587ad962dd453812b92097c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cef541d5ad8842bb4d49d18e6210723231f5ef8284886e81945fe5dda981aa4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:07:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.494932 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69c90d09-f23b-4740-a6b8-d216f671088a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-26T00:06:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://64cc85aa767787477cede55dc97e2bc2c60772c4835f900047681024c9d85cfe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":
\\\"cri-o://e0a60520ad01a0c7d0ecbfd0e9af9cf63151b70d379e06d087ad686e35321664\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb29536c750d854a81754d2f57ac43d03b793ddaeecdf22a5d914512d0344677\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-26T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-26T00:06:56Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.511304 4975 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-26T00:07:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-26T00:08:27Z is after 2025-08-24T17:21:41Z" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.554683 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.555030 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.555156 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.555271 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.555361 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:27Z","lastTransitionTime":"2026-01-26T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.658594 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.658641 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.658656 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.658679 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.658695 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:27Z","lastTransitionTime":"2026-01-26T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.761250 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.761303 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.761317 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.761336 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.761349 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:27Z","lastTransitionTime":"2026-01-26T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.864877 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.864997 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.865012 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.865033 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.865045 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:27Z","lastTransitionTime":"2026-01-26T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.968027 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.968090 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.968110 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.968140 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:27 crc kubenswrapper[4975]: I0126 00:08:27.968159 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:27Z","lastTransitionTime":"2026-01-26T00:08:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.071798 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.071944 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.071973 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.072005 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.072027 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:28Z","lastTransitionTime":"2026-01-26T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.147129 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:28 crc kubenswrapper[4975]: E0126 00:08:28.147497 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.147540 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.147631 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:28 crc kubenswrapper[4975]: E0126 00:08:28.147681 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:28 crc kubenswrapper[4975]: E0126 00:08:28.147861 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.148087 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:28 crc kubenswrapper[4975]: E0126 00:08:28.148223 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.163413 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 02:37:23.592753458 +0000 UTC Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.174594 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.175556 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.175758 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.175937 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.176087 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:28Z","lastTransitionTime":"2026-01-26T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.279119 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.279170 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.279187 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.279215 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.279233 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:28Z","lastTransitionTime":"2026-01-26T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.382424 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.382480 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.382500 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.382524 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.382541 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:28Z","lastTransitionTime":"2026-01-26T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.485783 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.485893 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.485910 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.485933 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.485950 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:28Z","lastTransitionTime":"2026-01-26T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.589091 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.589167 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.589192 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.589225 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.589251 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:28Z","lastTransitionTime":"2026-01-26T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.691170 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.691229 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.691239 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.691258 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.691272 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:28Z","lastTransitionTime":"2026-01-26T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.793832 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.793878 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.793888 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.793904 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.793914 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:28Z","lastTransitionTime":"2026-01-26T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.896258 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.896305 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.896314 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.896330 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.896340 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:28Z","lastTransitionTime":"2026-01-26T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.999349 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.999420 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.999445 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.999473 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:28 crc kubenswrapper[4975]: I0126 00:08:28.999491 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:28Z","lastTransitionTime":"2026-01-26T00:08:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:29 crc kubenswrapper[4975]: I0126 00:08:29.599178 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-10 17:55:08.969920577 +0000 UTC Jan 26 00:08:29 crc kubenswrapper[4975]: I0126 00:08:29.599313 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:29 crc kubenswrapper[4975]: I0126 00:08:29.599352 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:29 crc kubenswrapper[4975]: I0126 00:08:29.599377 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:29 crc kubenswrapper[4975]: E0126 00:08:29.602979 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:29 crc kubenswrapper[4975]: I0126 00:08:29.603019 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:29 crc kubenswrapper[4975]: E0126 00:08:29.603148 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:29 crc kubenswrapper[4975]: E0126 00:08:29.603264 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:29 crc kubenswrapper[4975]: E0126 00:08:29.603344 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:29 crc kubenswrapper[4975]: I0126 00:08:29.604609 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:29 crc kubenswrapper[4975]: I0126 00:08:29.604649 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:29 crc kubenswrapper[4975]: I0126 00:08:29.604663 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:29 crc kubenswrapper[4975]: I0126 00:08:29.604683 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:29 crc kubenswrapper[4975]: I0126 00:08:29.604696 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:29Z","lastTransitionTime":"2026-01-26T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:29 crc kubenswrapper[4975]: I0126 00:08:29.707057 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:29 crc kubenswrapper[4975]: I0126 00:08:29.707100 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:29 crc kubenswrapper[4975]: I0126 00:08:29.707112 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:29 crc kubenswrapper[4975]: I0126 00:08:29.707128 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:29 crc kubenswrapper[4975]: I0126 00:08:29.707141 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:29Z","lastTransitionTime":"2026-01-26T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:29 crc kubenswrapper[4975]: I0126 00:08:29.809845 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:29 crc kubenswrapper[4975]: I0126 00:08:29.809915 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:29 crc kubenswrapper[4975]: I0126 00:08:29.809926 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:29 crc kubenswrapper[4975]: I0126 00:08:29.809945 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:29 crc kubenswrapper[4975]: I0126 00:08:29.809958 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:29Z","lastTransitionTime":"2026-01-26T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:29 crc kubenswrapper[4975]: I0126 00:08:29.913085 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:29 crc kubenswrapper[4975]: I0126 00:08:29.913136 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:29 crc kubenswrapper[4975]: I0126 00:08:29.913153 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:29 crc kubenswrapper[4975]: I0126 00:08:29.913177 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:29 crc kubenswrapper[4975]: I0126 00:08:29.913195 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:29Z","lastTransitionTime":"2026-01-26T00:08:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.015301 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.015346 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.015360 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.015385 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.015407 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:30Z","lastTransitionTime":"2026-01-26T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.118448 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.118493 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.118505 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.118522 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.118534 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:30Z","lastTransitionTime":"2026-01-26T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.221936 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.223139 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.223388 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.223639 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.223903 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:30Z","lastTransitionTime":"2026-01-26T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.327251 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.327300 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.327314 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.327332 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.327347 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:30Z","lastTransitionTime":"2026-01-26T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.430506 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.430562 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.430579 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.430604 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.430622 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:30Z","lastTransitionTime":"2026-01-26T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.533270 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.533336 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.533353 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.533379 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.533397 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:30Z","lastTransitionTime":"2026-01-26T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.599314 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 06:39:46.446065241 +0000 UTC Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.636593 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.636675 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.636703 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.636767 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.636790 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:30Z","lastTransitionTime":"2026-01-26T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.739952 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.740416 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.740641 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.740845 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.741013 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:30Z","lastTransitionTime":"2026-01-26T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.844478 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.844518 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.844531 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.844553 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.844566 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:30Z","lastTransitionTime":"2026-01-26T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.948169 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.948237 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.948255 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.948279 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:30 crc kubenswrapper[4975]: I0126 00:08:30.948299 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:30Z","lastTransitionTime":"2026-01-26T00:08:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.051509 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.052008 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.052196 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.052370 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.052516 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:31Z","lastTransitionTime":"2026-01-26T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.146556 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.146593 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.146637 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.146911 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:31 crc kubenswrapper[4975]: E0126 00:08:31.147245 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:31 crc kubenswrapper[4975]: E0126 00:08:31.147392 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:31 crc kubenswrapper[4975]: E0126 00:08:31.147496 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:31 crc kubenswrapper[4975]: E0126 00:08:31.147573 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.155086 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.155126 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.155138 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.155156 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.155170 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:31Z","lastTransitionTime":"2026-01-26T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.257995 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.258067 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.258090 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.258119 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.258137 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:31Z","lastTransitionTime":"2026-01-26T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.361592 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.361873 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.362001 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.362072 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.362138 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:31Z","lastTransitionTime":"2026-01-26T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.465226 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.465272 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.465286 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.465304 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.465318 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:31Z","lastTransitionTime":"2026-01-26T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.568608 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.568663 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.568686 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.568703 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.568712 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:31Z","lastTransitionTime":"2026-01-26T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.599562 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 04:58:00.885187537 +0000 UTC Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.670911 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.670961 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.670975 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.670995 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.671008 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:31Z","lastTransitionTime":"2026-01-26T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.774373 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.774706 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.774840 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.774932 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.775021 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:31Z","lastTransitionTime":"2026-01-26T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.878581 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.878629 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.878640 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.878659 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.878671 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:31Z","lastTransitionTime":"2026-01-26T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.981456 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.981495 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.981507 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.981523 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:31 crc kubenswrapper[4975]: I0126 00:08:31.981536 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:31Z","lastTransitionTime":"2026-01-26T00:08:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.083694 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.083784 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.083801 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.083845 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.083859 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:32Z","lastTransitionTime":"2026-01-26T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.186595 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.186653 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.186671 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.186701 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.186721 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:32Z","lastTransitionTime":"2026-01-26T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.289718 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.289831 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.289854 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.289887 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.289904 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:32Z","lastTransitionTime":"2026-01-26T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.392907 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.392962 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.392974 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.392994 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.393007 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:32Z","lastTransitionTime":"2026-01-26T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.496046 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.496116 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.496138 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.496172 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.496197 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:32Z","lastTransitionTime":"2026-01-26T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.600069 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 04:04:05.970076526 +0000 UTC Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.600101 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.600159 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.600184 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.600214 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.600235 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:32Z","lastTransitionTime":"2026-01-26T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.702997 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.703050 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.703067 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.703092 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.703113 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:32Z","lastTransitionTime":"2026-01-26T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.806326 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.806386 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.806410 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.806438 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.806558 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:32Z","lastTransitionTime":"2026-01-26T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.910434 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.910902 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.911112 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.911303 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:32 crc kubenswrapper[4975]: I0126 00:08:32.911489 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:32Z","lastTransitionTime":"2026-01-26T00:08:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.014102 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.014140 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.014151 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.014170 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.014183 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:33Z","lastTransitionTime":"2026-01-26T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.116760 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.116822 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.116840 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.116868 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.116884 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:33Z","lastTransitionTime":"2026-01-26T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.146624 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.146663 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.146629 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.146624 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:33 crc kubenswrapper[4975]: E0126 00:08:33.146765 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:33 crc kubenswrapper[4975]: E0126 00:08:33.146874 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:33 crc kubenswrapper[4975]: E0126 00:08:33.147010 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:33 crc kubenswrapper[4975]: E0126 00:08:33.147232 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.224483 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.224552 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.224573 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.224611 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.224630 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:33Z","lastTransitionTime":"2026-01-26T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.329108 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.329176 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.329198 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.329224 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.329244 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:33Z","lastTransitionTime":"2026-01-26T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.432392 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.432463 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.432485 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.432514 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.432535 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:33Z","lastTransitionTime":"2026-01-26T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.534666 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.534725 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.534809 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.534845 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.534867 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:33Z","lastTransitionTime":"2026-01-26T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.600649 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 21:43:53.144671779 +0000 UTC Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.637701 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.637763 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.637771 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.637787 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.637796 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:33Z","lastTransitionTime":"2026-01-26T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.740030 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.740082 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.740096 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.740114 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.740127 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:33Z","lastTransitionTime":"2026-01-26T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.843487 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.843560 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.843596 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.843626 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.843649 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:33Z","lastTransitionTime":"2026-01-26T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.947813 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.947888 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.947907 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.947931 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:33 crc kubenswrapper[4975]: I0126 00:08:33.947947 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:33Z","lastTransitionTime":"2026-01-26T00:08:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.051715 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.051788 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.051800 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.051826 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.051839 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:34Z","lastTransitionTime":"2026-01-26T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.154469 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.154516 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.154528 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.154552 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.154566 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:34Z","lastTransitionTime":"2026-01-26T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.257713 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.257806 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.257823 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.257850 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.257868 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:34Z","lastTransitionTime":"2026-01-26T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.360282 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.360325 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.360337 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.360355 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.360367 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:34Z","lastTransitionTime":"2026-01-26T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.453345 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs\") pod \"network-metrics-daemon-s459q\" (UID: \"99d35071-9f6d-45df-841f-fd49ea0550c1\") " pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:34 crc kubenswrapper[4975]: E0126 00:08:34.453557 4975 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 00:08:34 crc kubenswrapper[4975]: E0126 00:08:34.453700 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs podName:99d35071-9f6d-45df-841f-fd49ea0550c1 nodeName:}" failed. No retries permitted until 2026-01-26 00:09:38.45366682 +0000 UTC m=+162.574872394 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs") pod "network-metrics-daemon-s459q" (UID: "99d35071-9f6d-45df-841f-fd49ea0550c1") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.463027 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.463071 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.463085 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.463105 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.463120 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:34Z","lastTransitionTime":"2026-01-26T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.565311 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.565358 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.565369 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.565386 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.565397 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:34Z","lastTransitionTime":"2026-01-26T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.601113 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 08:53:01.82303325 +0000 UTC Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.667242 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.667297 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.667311 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.667331 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.667346 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:34Z","lastTransitionTime":"2026-01-26T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.769482 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.769539 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.769549 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.769569 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.769583 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:34Z","lastTransitionTime":"2026-01-26T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.871355 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.871395 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.871406 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.871422 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.871434 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:34Z","lastTransitionTime":"2026-01-26T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.974489 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.974551 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.974568 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.974593 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:34 crc kubenswrapper[4975]: I0126 00:08:34.974612 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:34Z","lastTransitionTime":"2026-01-26T00:08:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.077798 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.077857 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.077876 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.077900 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.077917 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:35Z","lastTransitionTime":"2026-01-26T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.146465 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.146503 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.146555 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:35 crc kubenswrapper[4975]: E0126 00:08:35.146589 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.146629 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:35 crc kubenswrapper[4975]: E0126 00:08:35.146706 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:35 crc kubenswrapper[4975]: E0126 00:08:35.146813 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:35 crc kubenswrapper[4975]: E0126 00:08:35.146864 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.181571 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.181635 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.181652 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.181676 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.181694 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:35Z","lastTransitionTime":"2026-01-26T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.284666 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.284761 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.284784 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.284808 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.284825 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:35Z","lastTransitionTime":"2026-01-26T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.387376 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.387425 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.387439 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.387462 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.387476 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:35Z","lastTransitionTime":"2026-01-26T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.490643 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.490695 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.490712 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.490770 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.490789 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:35Z","lastTransitionTime":"2026-01-26T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.593397 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.593702 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.593788 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.593827 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.593850 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:35Z","lastTransitionTime":"2026-01-26T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.601646 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 01:12:47.615662091 +0000 UTC Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.696311 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.696375 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.696398 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.696427 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.696450 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:35Z","lastTransitionTime":"2026-01-26T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.799585 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.799657 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.799680 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.799710 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.799771 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:35Z","lastTransitionTime":"2026-01-26T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.902462 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.902552 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.902572 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.902597 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:35 crc kubenswrapper[4975]: I0126 00:08:35.902615 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:35Z","lastTransitionTime":"2026-01-26T00:08:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.005777 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.005852 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.005877 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.005907 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.005929 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:36Z","lastTransitionTime":"2026-01-26T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.109548 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.109618 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.109640 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.109670 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.109692 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:36Z","lastTransitionTime":"2026-01-26T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.212763 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.212837 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.212848 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.212895 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.212910 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:36Z","lastTransitionTime":"2026-01-26T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.217670 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=82.217654789 podStartE2EDuration="1m22.217654789s" podCreationTimestamp="2026-01-26 00:07:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:08:36.196696136 +0000 UTC m=+100.317901710" watchObservedRunningTime="2026-01-26 00:08:36.217654789 +0000 UTC m=+100.338860293" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.229974 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-ckf4p" podStartSLOduration=81.229940973 podStartE2EDuration="1m21.229940973s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:08:36.228849261 +0000 UTC m=+100.350054765" watchObservedRunningTime="2026-01-26 00:08:36.229940973 +0000 UTC m=+100.351146507" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.270166 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podStartSLOduration=81.270141389 podStartE2EDuration="1m21.270141389s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:08:36.267479583 +0000 UTC m=+100.388685087" watchObservedRunningTime="2026-01-26 00:08:36.270141389 +0000 UTC m=+100.391346903" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.270537 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-jpmlj" podStartSLOduration=81.27052784 podStartE2EDuration="1m21.27052784s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:08:36.249200857 +0000 UTC m=+100.370406361" watchObservedRunningTime="2026-01-26 00:08:36.27052784 +0000 UTC m=+100.391733344" Jan 26 
00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.299067 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-xwb6p" podStartSLOduration=80.299047641 podStartE2EDuration="1m20.299047641s" podCreationTimestamp="2026-01-26 00:07:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:08:36.285097679 +0000 UTC m=+100.406303183" watchObservedRunningTime="2026-01-26 00:08:36.299047641 +0000 UTC m=+100.420253135" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.311028 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=31.311005595 podStartE2EDuration="31.311005595s" podCreationTimestamp="2026-01-26 00:08:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:08:36.299663098 +0000 UTC m=+100.420868592" watchObservedRunningTime="2026-01-26 00:08:36.311005595 +0000 UTC m=+100.432211089" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.315215 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.315257 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.315269 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.315288 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.315301 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:36Z","lastTransitionTime":"2026-01-26T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.323432 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=46.323405912 podStartE2EDuration="46.323405912s" podCreationTimestamp="2026-01-26 00:07:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:08:36.311409576 +0000 UTC m=+100.432615070" watchObservedRunningTime="2026-01-26 00:08:36.323405912 +0000 UTC m=+100.444611426" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.355333 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-vcvtm" podStartSLOduration=81.35531271 podStartE2EDuration="1m21.35531271s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:08:36.323874505 +0000 UTC m=+100.445079999" watchObservedRunningTime="2026-01-26 00:08:36.35531271 +0000 UTC m=+100.476518204" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.417812 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.417858 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.417870 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.417885 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.417897 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:36Z","lastTransitionTime":"2026-01-26T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.471470 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=80.471446171 podStartE2EDuration="1m20.471446171s" podCreationTimestamp="2026-01-26 00:07:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:08:36.469784283 +0000 UTC m=+100.590989777" watchObservedRunningTime="2026-01-26 00:08:36.471446171 +0000 UTC m=+100.592651665" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.472156 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-bcsb4" podStartSLOduration=81.472148621 podStartE2EDuration="1m21.472148621s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:08:36.44534976 +0000 UTC m=+100.566555254" watchObservedRunningTime="2026-01-26 00:08:36.472148621 +0000 UTC m=+100.593354115" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.521367 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.521648 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.521756 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.521876 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.521968 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:36Z","lastTransitionTime":"2026-01-26T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.527787 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=78.527762761 podStartE2EDuration="1m18.527762761s" podCreationTimestamp="2026-01-26 00:07:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:08:36.526791233 +0000 UTC m=+100.647996737" watchObservedRunningTime="2026-01-26 00:08:36.527762761 +0000 UTC m=+100.648968265" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.602623 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 04:02:35.133282713 +0000 UTC Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.625006 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.625051 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.625064 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.625085 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.625102 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:36Z","lastTransitionTime":"2026-01-26T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.727312 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.727356 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.727370 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.727394 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.727409 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:36Z","lastTransitionTime":"2026-01-26T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.829984 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.830028 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.830039 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.830056 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.830067 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:36Z","lastTransitionTime":"2026-01-26T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.932898 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.932939 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.932952 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.932968 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:36 crc kubenswrapper[4975]: I0126 00:08:36.932981 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:36Z","lastTransitionTime":"2026-01-26T00:08:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.012418 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.012463 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.012479 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.012499 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.012514 4975 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-26T00:08:37Z","lastTransitionTime":"2026-01-26T00:08:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.074425 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjvqt"] Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.075010 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjvqt" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.078496 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.078695 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.078983 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.079843 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.146311 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.146377 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.146650 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:37 crc kubenswrapper[4975]: E0126 00:08:37.146875 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.146899 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:37 crc kubenswrapper[4975]: E0126 00:08:37.146979 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:37 crc kubenswrapper[4975]: E0126 00:08:37.147140 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:37 crc kubenswrapper[4975]: E0126 00:08:37.147281 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.185051 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/dd5f119e-7f37-4af5-9e64-b9f2c4ae4152-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-bjvqt\" (UID: \"dd5f119e-7f37-4af5-9e64-b9f2c4ae4152\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjvqt" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.185198 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dd5f119e-7f37-4af5-9e64-b9f2c4ae4152-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-bjvqt\" (UID: \"dd5f119e-7f37-4af5-9e64-b9f2c4ae4152\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjvqt" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.185318 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dd5f119e-7f37-4af5-9e64-b9f2c4ae4152-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-bjvqt\" (UID: \"dd5f119e-7f37-4af5-9e64-b9f2c4ae4152\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjvqt" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.185481 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/dd5f119e-7f37-4af5-9e64-b9f2c4ae4152-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-bjvqt\" (UID: \"dd5f119e-7f37-4af5-9e64-b9f2c4ae4152\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjvqt" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.185585 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/dd5f119e-7f37-4af5-9e64-b9f2c4ae4152-service-ca\") pod \"cluster-version-operator-5c965bbfc6-bjvqt\" (UID: \"dd5f119e-7f37-4af5-9e64-b9f2c4ae4152\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjvqt" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.286466 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/dd5f119e-7f37-4af5-9e64-b9f2c4ae4152-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-bjvqt\" (UID: \"dd5f119e-7f37-4af5-9e64-b9f2c4ae4152\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjvqt" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.286527 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dd5f119e-7f37-4af5-9e64-b9f2c4ae4152-serving-cert\") pod 
\"cluster-version-operator-5c965bbfc6-bjvqt\" (UID: \"dd5f119e-7f37-4af5-9e64-b9f2c4ae4152\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjvqt" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.286560 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dd5f119e-7f37-4af5-9e64-b9f2c4ae4152-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-bjvqt\" (UID: \"dd5f119e-7f37-4af5-9e64-b9f2c4ae4152\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjvqt" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.286661 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/dd5f119e-7f37-4af5-9e64-b9f2c4ae4152-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-bjvqt\" (UID: \"dd5f119e-7f37-4af5-9e64-b9f2c4ae4152\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjvqt" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.286700 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/dd5f119e-7f37-4af5-9e64-b9f2c4ae4152-service-ca\") pod \"cluster-version-operator-5c965bbfc6-bjvqt\" (UID: \"dd5f119e-7f37-4af5-9e64-b9f2c4ae4152\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjvqt" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.287425 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/dd5f119e-7f37-4af5-9e64-b9f2c4ae4152-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-bjvqt\" (UID: \"dd5f119e-7f37-4af5-9e64-b9f2c4ae4152\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjvqt" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.287443 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/dd5f119e-7f37-4af5-9e64-b9f2c4ae4152-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-bjvqt\" (UID: \"dd5f119e-7f37-4af5-9e64-b9f2c4ae4152\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjvqt" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.288686 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/dd5f119e-7f37-4af5-9e64-b9f2c4ae4152-service-ca\") pod \"cluster-version-operator-5c965bbfc6-bjvqt\" (UID: \"dd5f119e-7f37-4af5-9e64-b9f2c4ae4152\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjvqt" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.304636 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dd5f119e-7f37-4af5-9e64-b9f2c4ae4152-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-bjvqt\" (UID: \"dd5f119e-7f37-4af5-9e64-b9f2c4ae4152\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjvqt" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.311195 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dd5f119e-7f37-4af5-9e64-b9f2c4ae4152-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-bjvqt\" (UID: \"dd5f119e-7f37-4af5-9e64-b9f2c4ae4152\") " 
pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjvqt" Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.395796 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjvqt" Jan 26 00:08:37 crc kubenswrapper[4975]: W0126 00:08:37.423347 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddd5f119e_7f37_4af5_9e64_b9f2c4ae4152.slice/crio-779fdd35a94a3817b3b0b30bea700d3ce7dca79dfdf9e2fb3becd7809e7203f4 WatchSource:0}: Error finding container 779fdd35a94a3817b3b0b30bea700d3ce7dca79dfdf9e2fb3becd7809e7203f4: Status 404 returned error can't find the container with id 779fdd35a94a3817b3b0b30bea700d3ce7dca79dfdf9e2fb3becd7809e7203f4 Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.603695 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 20:35:20.057456485 +0000 UTC Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.604323 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.616040 4975 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.692419 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjvqt" event={"ID":"dd5f119e-7f37-4af5-9e64-b9f2c4ae4152","Type":"ContainerStarted","Data":"0967f85a097d9c6d08ef3f0107d8d8ddab449f612c01da79e629a81fb86c01d3"} Jan 26 00:08:37 crc kubenswrapper[4975]: I0126 00:08:37.692473 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjvqt" event={"ID":"dd5f119e-7f37-4af5-9e64-b9f2c4ae4152","Type":"ContainerStarted","Data":"779fdd35a94a3817b3b0b30bea700d3ce7dca79dfdf9e2fb3becd7809e7203f4"} Jan 26 00:08:38 crc kubenswrapper[4975]: I0126 00:08:38.149232 4975 scope.go:117] "RemoveContainer" containerID="61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4" Jan 26 00:08:38 crc kubenswrapper[4975]: E0126 00:08:38.149626 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2vrv2_openshift-ovn-kubernetes(3fd68329-6540-4965-a036-ddd1045f1190)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" podUID="3fd68329-6540-4965-a036-ddd1045f1190" Jan 26 00:08:39 crc kubenswrapper[4975]: I0126 00:08:39.147293 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:39 crc kubenswrapper[4975]: I0126 00:08:39.147304 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:39 crc kubenswrapper[4975]: I0126 00:08:39.147301 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:39 crc kubenswrapper[4975]: I0126 00:08:39.147457 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:39 crc kubenswrapper[4975]: E0126 00:08:39.147806 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:39 crc kubenswrapper[4975]: E0126 00:08:39.148210 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:39 crc kubenswrapper[4975]: E0126 00:08:39.148282 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:39 crc kubenswrapper[4975]: E0126 00:08:39.147645 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:41 crc kubenswrapper[4975]: I0126 00:08:41.146811 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:41 crc kubenswrapper[4975]: I0126 00:08:41.146885 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:41 crc kubenswrapper[4975]: I0126 00:08:41.146881 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:41 crc kubenswrapper[4975]: I0126 00:08:41.146829 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:41 crc kubenswrapper[4975]: E0126 00:08:41.146979 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:41 crc kubenswrapper[4975]: E0126 00:08:41.147128 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:41 crc kubenswrapper[4975]: E0126 00:08:41.147183 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:41 crc kubenswrapper[4975]: E0126 00:08:41.147222 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:43 crc kubenswrapper[4975]: I0126 00:08:43.146802 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:43 crc kubenswrapper[4975]: I0126 00:08:43.146849 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:43 crc kubenswrapper[4975]: I0126 00:08:43.146860 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:43 crc kubenswrapper[4975]: I0126 00:08:43.146730 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:43 crc kubenswrapper[4975]: E0126 00:08:43.147013 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:43 crc kubenswrapper[4975]: E0126 00:08:43.147156 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:43 crc kubenswrapper[4975]: E0126 00:08:43.147386 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:43 crc kubenswrapper[4975]: E0126 00:08:43.147474 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:45 crc kubenswrapper[4975]: I0126 00:08:45.146725 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:45 crc kubenswrapper[4975]: I0126 00:08:45.146803 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:45 crc kubenswrapper[4975]: I0126 00:08:45.146862 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:45 crc kubenswrapper[4975]: I0126 00:08:45.146912 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:45 crc kubenswrapper[4975]: E0126 00:08:45.148077 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:45 crc kubenswrapper[4975]: E0126 00:08:45.148085 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:45 crc kubenswrapper[4975]: E0126 00:08:45.148316 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:45 crc kubenswrapper[4975]: E0126 00:08:45.148509 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:47 crc kubenswrapper[4975]: I0126 00:08:47.146249 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:47 crc kubenswrapper[4975]: I0126 00:08:47.146273 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:47 crc kubenswrapper[4975]: E0126 00:08:47.147254 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:47 crc kubenswrapper[4975]: I0126 00:08:47.146440 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:47 crc kubenswrapper[4975]: E0126 00:08:47.147307 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:47 crc kubenswrapper[4975]: I0126 00:08:47.146309 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:47 crc kubenswrapper[4975]: E0126 00:08:47.147522 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:47 crc kubenswrapper[4975]: E0126 00:08:47.147799 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:49 crc kubenswrapper[4975]: I0126 00:08:49.147201 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:49 crc kubenswrapper[4975]: E0126 00:08:49.147436 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:49 crc kubenswrapper[4975]: I0126 00:08:49.147555 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:49 crc kubenswrapper[4975]: E0126 00:08:49.147664 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:49 crc kubenswrapper[4975]: I0126 00:08:49.148448 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:49 crc kubenswrapper[4975]: I0126 00:08:49.148491 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:49 crc kubenswrapper[4975]: E0126 00:08:49.148556 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:49 crc kubenswrapper[4975]: E0126 00:08:49.148710 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:49 crc kubenswrapper[4975]: I0126 00:08:49.149319 4975 scope.go:117] "RemoveContainer" containerID="61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4" Jan 26 00:08:49 crc kubenswrapper[4975]: E0126 00:08:49.149636 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2vrv2_openshift-ovn-kubernetes(3fd68329-6540-4965-a036-ddd1045f1190)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" podUID="3fd68329-6540-4965-a036-ddd1045f1190" Jan 26 00:08:51 crc kubenswrapper[4975]: I0126 00:08:51.146169 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:51 crc kubenswrapper[4975]: I0126 00:08:51.146234 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:51 crc kubenswrapper[4975]: E0126 00:08:51.146300 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:51 crc kubenswrapper[4975]: E0126 00:08:51.146392 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:51 crc kubenswrapper[4975]: I0126 00:08:51.146495 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:51 crc kubenswrapper[4975]: E0126 00:08:51.146928 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:51 crc kubenswrapper[4975]: I0126 00:08:51.146983 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:51 crc kubenswrapper[4975]: E0126 00:08:51.147168 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:51 crc kubenswrapper[4975]: I0126 00:08:51.742636 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bcsb4_7d3cba21-428c-4151-bb16-f3478d54c90e/kube-multus/1.log" Jan 26 00:08:51 crc kubenswrapper[4975]: I0126 00:08:51.743459 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bcsb4_7d3cba21-428c-4151-bb16-f3478d54c90e/kube-multus/0.log" Jan 26 00:08:51 crc kubenswrapper[4975]: I0126 00:08:51.743591 4975 generic.go:334] "Generic (PLEG): container finished" podID="7d3cba21-428c-4151-bb16-f3478d54c90e" containerID="c6cc951f021693f452e15865d8fed4a5318e88a5a4c778f8d9dbf6464fde061b" exitCode=1 Jan 26 00:08:51 crc kubenswrapper[4975]: I0126 00:08:51.743652 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bcsb4" event={"ID":"7d3cba21-428c-4151-bb16-f3478d54c90e","Type":"ContainerDied","Data":"c6cc951f021693f452e15865d8fed4a5318e88a5a4c778f8d9dbf6464fde061b"} Jan 26 00:08:51 crc kubenswrapper[4975]: I0126 00:08:51.743725 4975 scope.go:117] "RemoveContainer" containerID="e885c1d4d64daf551a6ccf19a05b7c645e452ead0fbedd28fad78691f6984450" Jan 26 00:08:51 crc kubenswrapper[4975]: I0126 00:08:51.744599 4975 scope.go:117] "RemoveContainer" containerID="c6cc951f021693f452e15865d8fed4a5318e88a5a4c778f8d9dbf6464fde061b" Jan 26 00:08:51 crc kubenswrapper[4975]: E0126 00:08:51.744979 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-bcsb4_openshift-multus(7d3cba21-428c-4151-bb16-f3478d54c90e)\"" pod="openshift-multus/multus-bcsb4" podUID="7d3cba21-428c-4151-bb16-f3478d54c90e" Jan 26 00:08:51 crc kubenswrapper[4975]: I0126 00:08:51.772036 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjvqt" podStartSLOduration=96.771923709 podStartE2EDuration="1m36.771923709s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:08:37.716254914 +0000 UTC m=+101.837460448" watchObservedRunningTime="2026-01-26 00:08:51.771923709 +0000 UTC m=+115.893129243" Jan 26 00:08:52 crc kubenswrapper[4975]: I0126 00:08:52.748459 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bcsb4_7d3cba21-428c-4151-bb16-f3478d54c90e/kube-multus/1.log" Jan 26 00:08:53 crc kubenswrapper[4975]: I0126 00:08:53.147220 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:53 crc kubenswrapper[4975]: I0126 00:08:53.147381 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:53 crc kubenswrapper[4975]: E0126 00:08:53.147874 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:53 crc kubenswrapper[4975]: E0126 00:08:53.148074 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:53 crc kubenswrapper[4975]: I0126 00:08:53.147418 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:53 crc kubenswrapper[4975]: I0126 00:08:53.147517 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:53 crc kubenswrapper[4975]: E0126 00:08:53.148347 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:53 crc kubenswrapper[4975]: E0126 00:08:53.148229 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:55 crc kubenswrapper[4975]: I0126 00:08:55.146530 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:55 crc kubenswrapper[4975]: I0126 00:08:55.146605 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:55 crc kubenswrapper[4975]: I0126 00:08:55.146530 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:55 crc kubenswrapper[4975]: E0126 00:08:55.146777 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:55 crc kubenswrapper[4975]: I0126 00:08:55.146911 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:55 crc kubenswrapper[4975]: E0126 00:08:55.146978 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:55 crc kubenswrapper[4975]: E0126 00:08:55.147128 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:55 crc kubenswrapper[4975]: E0126 00:08:55.147551 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:56 crc kubenswrapper[4975]: E0126 00:08:56.138995 4975 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Jan 26 00:08:56 crc kubenswrapper[4975]: E0126 00:08:56.241437 4975 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 26 00:08:57 crc kubenswrapper[4975]: I0126 00:08:57.147302 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:57 crc kubenswrapper[4975]: I0126 00:08:57.147542 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:57 crc kubenswrapper[4975]: I0126 00:08:57.147355 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:57 crc kubenswrapper[4975]: E0126 00:08:57.147634 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:57 crc kubenswrapper[4975]: I0126 00:08:57.147356 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:57 crc kubenswrapper[4975]: E0126 00:08:57.147809 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:08:57 crc kubenswrapper[4975]: E0126 00:08:57.147940 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:57 crc kubenswrapper[4975]: E0126 00:08:57.148081 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:59 crc kubenswrapper[4975]: I0126 00:08:59.146991 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:08:59 crc kubenswrapper[4975]: I0126 00:08:59.147050 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:08:59 crc kubenswrapper[4975]: E0126 00:08:59.148081 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:08:59 crc kubenswrapper[4975]: E0126 00:08:59.148318 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:08:59 crc kubenswrapper[4975]: I0126 00:08:59.147152 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:08:59 crc kubenswrapper[4975]: I0126 00:08:59.147211 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:08:59 crc kubenswrapper[4975]: E0126 00:08:59.148513 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:08:59 crc kubenswrapper[4975]: E0126 00:08:59.148691 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:09:01 crc kubenswrapper[4975]: I0126 00:09:01.146254 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:09:01 crc kubenswrapper[4975]: I0126 00:09:01.146361 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:09:01 crc kubenswrapper[4975]: I0126 00:09:01.146365 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:09:01 crc kubenswrapper[4975]: I0126 00:09:01.146486 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:09:01 crc kubenswrapper[4975]: E0126 00:09:01.146486 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:09:01 crc kubenswrapper[4975]: E0126 00:09:01.146600 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:09:01 crc kubenswrapper[4975]: E0126 00:09:01.146874 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:09:01 crc kubenswrapper[4975]: E0126 00:09:01.147028 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:09:01 crc kubenswrapper[4975]: E0126 00:09:01.243561 4975 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 26 00:09:03 crc kubenswrapper[4975]: I0126 00:09:03.146969 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:09:03 crc kubenswrapper[4975]: I0126 00:09:03.147131 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:09:03 crc kubenswrapper[4975]: E0126 00:09:03.147147 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:09:03 crc kubenswrapper[4975]: E0126 00:09:03.147237 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:09:03 crc kubenswrapper[4975]: I0126 00:09:03.147304 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:09:03 crc kubenswrapper[4975]: I0126 00:09:03.147696 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:09:03 crc kubenswrapper[4975]: I0126 00:09:03.147824 4975 scope.go:117] "RemoveContainer" containerID="61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4" Jan 26 00:09:03 crc kubenswrapper[4975]: E0126 00:09:03.147707 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:09:03 crc kubenswrapper[4975]: E0126 00:09:03.147972 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:09:03 crc kubenswrapper[4975]: I0126 00:09:03.791907 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2vrv2_3fd68329-6540-4965-a036-ddd1045f1190/ovnkube-controller/3.log" Jan 26 00:09:03 crc kubenswrapper[4975]: I0126 00:09:03.794466 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerStarted","Data":"ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5"} Jan 26 00:09:03 crc kubenswrapper[4975]: I0126 00:09:03.795255 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:09:03 crc kubenswrapper[4975]: I0126 00:09:03.828371 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" podStartSLOduration=108.828341411 podStartE2EDuration="1m48.828341411s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:03.827211348 +0000 UTC m=+127.948416842" watchObservedRunningTime="2026-01-26 00:09:03.828341411 +0000 UTC m=+127.949546925" Jan 26 00:09:04 crc kubenswrapper[4975]: I0126 00:09:04.261703 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-s459q"] Jan 26 00:09:04 crc kubenswrapper[4975]: I0126 00:09:04.261844 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:09:04 crc kubenswrapper[4975]: E0126 00:09:04.261942 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:09:05 crc kubenswrapper[4975]: I0126 00:09:05.147071 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:09:05 crc kubenswrapper[4975]: I0126 00:09:05.147603 4975 scope.go:117] "RemoveContainer" containerID="c6cc951f021693f452e15865d8fed4a5318e88a5a4c778f8d9dbf6464fde061b" Jan 26 00:09:05 crc kubenswrapper[4975]: I0126 00:09:05.147196 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:09:05 crc kubenswrapper[4975]: E0126 00:09:05.147640 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:09:05 crc kubenswrapper[4975]: E0126 00:09:05.147772 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:09:05 crc kubenswrapper[4975]: I0126 00:09:05.147248 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:09:05 crc kubenswrapper[4975]: E0126 00:09:05.148064 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:09:05 crc kubenswrapper[4975]: I0126 00:09:05.803873 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bcsb4_7d3cba21-428c-4151-bb16-f3478d54c90e/kube-multus/1.log" Jan 26 00:09:05 crc kubenswrapper[4975]: I0126 00:09:05.803932 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bcsb4" event={"ID":"7d3cba21-428c-4151-bb16-f3478d54c90e","Type":"ContainerStarted","Data":"291b8b404b3282be2bbfe47022cd28cb25ec61846c02573816da8fbe453e002a"} Jan 26 00:09:06 crc kubenswrapper[4975]: I0126 00:09:06.147194 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:09:06 crc kubenswrapper[4975]: E0126 00:09:06.148681 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:09:06 crc kubenswrapper[4975]: E0126 00:09:06.244167 4975 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 26 00:09:07 crc kubenswrapper[4975]: I0126 00:09:07.146962 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:09:07 crc kubenswrapper[4975]: I0126 00:09:07.147059 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:09:07 crc kubenswrapper[4975]: I0126 00:09:07.146997 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:09:07 crc kubenswrapper[4975]: E0126 00:09:07.147226 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:09:07 crc kubenswrapper[4975]: E0126 00:09:07.147411 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:09:07 crc kubenswrapper[4975]: E0126 00:09:07.147590 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:09:08 crc kubenswrapper[4975]: I0126 00:09:08.146299 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:09:08 crc kubenswrapper[4975]: E0126 00:09:08.146545 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:09:09 crc kubenswrapper[4975]: I0126 00:09:09.147020 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:09:09 crc kubenswrapper[4975]: I0126 00:09:09.147057 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:09:09 crc kubenswrapper[4975]: I0126 00:09:09.147144 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:09:09 crc kubenswrapper[4975]: E0126 00:09:09.147183 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:09:09 crc kubenswrapper[4975]: E0126 00:09:09.147343 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:09:09 crc kubenswrapper[4975]: E0126 00:09:09.147406 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:09:10 crc kubenswrapper[4975]: I0126 00:09:10.146361 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:09:10 crc kubenswrapper[4975]: E0126 00:09:10.146589 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-s459q" podUID="99d35071-9f6d-45df-841f-fd49ea0550c1" Jan 26 00:09:10 crc kubenswrapper[4975]: I0126 00:09:10.421973 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:09:11 crc kubenswrapper[4975]: I0126 00:09:11.147238 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:09:11 crc kubenswrapper[4975]: I0126 00:09:11.147287 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:09:11 crc kubenswrapper[4975]: I0126 00:09:11.147391 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:09:11 crc kubenswrapper[4975]: E0126 00:09:11.147592 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 26 00:09:11 crc kubenswrapper[4975]: E0126 00:09:11.147788 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 26 00:09:11 crc kubenswrapper[4975]: E0126 00:09:11.147915 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 26 00:09:12 crc kubenswrapper[4975]: I0126 00:09:12.146991 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:09:12 crc kubenswrapper[4975]: I0126 00:09:12.148848 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 26 00:09:12 crc kubenswrapper[4975]: I0126 00:09:12.148917 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 26 00:09:13 crc kubenswrapper[4975]: I0126 00:09:13.146683 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:09:13 crc kubenswrapper[4975]: I0126 00:09:13.146788 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:09:13 crc kubenswrapper[4975]: I0126 00:09:13.146683 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:09:13 crc kubenswrapper[4975]: I0126 00:09:13.149946 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 26 00:09:13 crc kubenswrapper[4975]: I0126 00:09:13.149991 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 26 00:09:13 crc kubenswrapper[4975]: I0126 00:09:13.150127 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 26 00:09:13 crc kubenswrapper[4975]: I0126 00:09:13.151044 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 26 00:09:17 crc kubenswrapper[4975]: I0126 00:09:17.971623 4975 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.015903 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-xh2mk"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.016599 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-xh2mk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.020585 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-2rcjc"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.020981 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.021168 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-vjg2l"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.021521 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vjg2l" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.021686 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.021686 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-2rcjc" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.023310 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8r7mh"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.024100 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8r7mh" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.024246 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zdp7w"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.025900 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.025941 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-wrh4k"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.026104 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zdp7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.026305 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.028318 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.028583 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.030083 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.032044 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.032127 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.032940 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.033447 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.037174 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.037914 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gqm5h"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.038799 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.039448 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.039814 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-krk6p"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.040039 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gqm5h" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.041438 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.042749 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.057408 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.057523 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.057804 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.058339 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.058368 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.059099 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.059124 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.059204 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.059307 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.059357 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.059538 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.059702 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.059856 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.059979 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.060027 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.060149 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.060204 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.060155 4975 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.060394 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.060541 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.060665 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.060830 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.060976 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-q6z7v"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.061424 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-pruner-29489760-c9d48"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.061835 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-pruner-29489760-c9d48" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.062272 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q6z7v" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.062344 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.062382 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.062710 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.062877 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.063011 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.063162 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.064212 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-b8p7w"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.064949 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-b8p7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.065276 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-cbkrr"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.065881 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-cbkrr" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.067760 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-xmsj9"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.068778 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.070814 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.072140 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.072267 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.072329 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.072537 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.072694 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.072900 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.073102 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.074014 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"serviceca" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.074115 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.074334 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.074609 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.074650 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.074967 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.075261 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xrnhz"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.075993 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.076588 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-xh2mk"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.076704 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.077383 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.077649 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"pruner-dockercfg-p7bcw" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.077903 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.080542 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.080719 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.080848 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.080967 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.081178 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.081508 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-2rcjc"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.081998 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.082444 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.082591 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.082990 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.083298 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.083985 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.084318 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.084427 4975 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication-operator"/"authentication-operator-config" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.084523 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.084645 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.084750 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.084796 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-vjg2l"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.084904 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.084955 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.084988 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.090607 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fe206c78-fea9-4b0e-b236-3e4bd73f1d13-console-config\") pod \"console-f9d7485db-wrh4k\" (UID: \"fe206c78-fea9-4b0e-b236-3e4bd73f1d13\") " pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.090675 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48682975-6b38-48ad-abbd-b1c89ac4bbd9-config\") pod \"openshift-apiserver-operator-796bbdcf4f-gqm5h\" (UID: \"48682975-6b38-48ad-abbd-b1c89ac4bbd9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gqm5h" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.090723 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64ca38fe-db66-4303-a97e-77b7fa4c6214-config\") pod \"console-operator-58897d9998-2rcjc\" (UID: \"64ca38fe-db66-4303-a97e-77b7fa4c6214\") " pod="openshift-console-operator/console-operator-58897d9998-2rcjc" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.090784 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fe206c78-fea9-4b0e-b236-3e4bd73f1d13-oauth-serving-cert\") pod \"console-f9d7485db-wrh4k\" (UID: \"fe206c78-fea9-4b0e-b236-3e4bd73f1d13\") " pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.090808 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwtz2\" (UniqueName: \"kubernetes.io/projected/557733ab-6df8-42b0-893e-a10f05e34f2d-kube-api-access-nwtz2\") pod \"route-controller-manager-6576b87f9c-xx2cp\" (UID: \"557733ab-6df8-42b0-893e-a10f05e34f2d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp" Jan 26 00:09:18 crc kubenswrapper[4975]: 
I0126 00:09:18.090854 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64ca38fe-db66-4303-a97e-77b7fa4c6214-serving-cert\") pod \"console-operator-58897d9998-2rcjc\" (UID: \"64ca38fe-db66-4303-a97e-77b7fa4c6214\") " pod="openshift-console-operator/console-operator-58897d9998-2rcjc" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.090878 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/48682975-6b38-48ad-abbd-b1c89ac4bbd9-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-gqm5h\" (UID: \"48682975-6b38-48ad-abbd-b1c89ac4bbd9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gqm5h" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.090906 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/19f40110-25a0-41cb-b740-67d93659b7dc-client-ca\") pod \"controller-manager-879f6c89f-krk6p\" (UID: \"19f40110-25a0-41cb-b740-67d93659b7dc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.090951 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/be0d8f61-b7b4-48cf-a6cb-6780df7d99d6-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-xh2mk\" (UID: \"be0d8f61-b7b4-48cf-a6cb-6780df7d99d6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-xh2mk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.090962 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.090984 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4376008-2a48-499e-a548-0b5f233e3af7-serving-cert\") pod \"openshift-config-operator-7777fb866f-vjg2l\" (UID: \"c4376008-2a48-499e-a548-0b5f233e3af7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vjg2l" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.091009 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dg85s\" (UniqueName: \"kubernetes.io/projected/19f40110-25a0-41cb-b740-67d93659b7dc-kube-api-access-dg85s\") pod \"controller-manager-879f6c89f-krk6p\" (UID: \"19f40110-25a0-41cb-b740-67d93659b7dc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.091033 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nc6x9\" (UniqueName: \"kubernetes.io/projected/c4376008-2a48-499e-a548-0b5f233e3af7-kube-api-access-nc6x9\") pod \"openshift-config-operator-7777fb866f-vjg2l\" (UID: \"c4376008-2a48-499e-a548-0b5f233e3af7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vjg2l" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.091056 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/557733ab-6df8-42b0-893e-a10f05e34f2d-client-ca\") pod 
\"route-controller-manager-6576b87f9c-xx2cp\" (UID: \"557733ab-6df8-42b0-893e-a10f05e34f2d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.091079 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9424e0a3-86c1-49b3-a3b6-f599caf06e8b-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-zdp7w\" (UID: \"9424e0a3-86c1-49b3-a3b6-f599caf06e8b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zdp7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.091134 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fe206c78-fea9-4b0e-b236-3e4bd73f1d13-console-serving-cert\") pod \"console-f9d7485db-wrh4k\" (UID: \"fe206c78-fea9-4b0e-b236-3e4bd73f1d13\") " pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.091166 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/c4376008-2a48-499e-a548-0b5f233e3af7-available-featuregates\") pod \"openshift-config-operator-7777fb866f-vjg2l\" (UID: \"c4376008-2a48-499e-a548-0b5f233e3af7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vjg2l" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.091350 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/557733ab-6df8-42b0-893e-a10f05e34f2d-config\") pod \"route-controller-manager-6576b87f9c-xx2cp\" (UID: \"557733ab-6df8-42b0-893e-a10f05e34f2d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.091377 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/9424e0a3-86c1-49b3-a3b6-f599caf06e8b-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-zdp7w\" (UID: \"9424e0a3-86c1-49b3-a3b6-f599caf06e8b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zdp7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.091506 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9424e0a3-86c1-49b3-a3b6-f599caf06e8b-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-zdp7w\" (UID: \"9424e0a3-86c1-49b3-a3b6-f599caf06e8b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zdp7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.091536 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xz2mk\" (UniqueName: \"kubernetes.io/projected/9424e0a3-86c1-49b3-a3b6-f599caf06e8b-kube-api-access-xz2mk\") pod \"cluster-image-registry-operator-dc59b4c8b-zdp7w\" (UID: \"9424e0a3-86c1-49b3-a3b6-f599caf06e8b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zdp7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.091565 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/d50d7251-c487-444f-ae08-3713d8ea5ce3-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-8r7mh\" (UID: \"d50d7251-c487-444f-ae08-3713d8ea5ce3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8r7mh" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.091593 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzd7x\" (UniqueName: \"kubernetes.io/projected/64ca38fe-db66-4303-a97e-77b7fa4c6214-kube-api-access-qzd7x\") pod \"console-operator-58897d9998-2rcjc\" (UID: \"64ca38fe-db66-4303-a97e-77b7fa4c6214\") " pod="openshift-console-operator/console-operator-58897d9998-2rcjc" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.091791 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nzkg\" (UniqueName: \"kubernetes.io/projected/d50d7251-c487-444f-ae08-3713d8ea5ce3-kube-api-access-8nzkg\") pod \"cluster-samples-operator-665b6dd947-8r7mh\" (UID: \"d50d7251-c487-444f-ae08-3713d8ea5ce3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8r7mh" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.091837 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/64ca38fe-db66-4303-a97e-77b7fa4c6214-trusted-ca\") pod \"console-operator-58897d9998-2rcjc\" (UID: \"64ca38fe-db66-4303-a97e-77b7fa4c6214\") " pod="openshift-console-operator/console-operator-58897d9998-2rcjc" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.091862 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gk4r\" (UniqueName: \"kubernetes.io/projected/fe206c78-fea9-4b0e-b236-3e4bd73f1d13-kube-api-access-6gk4r\") pod \"console-f9d7485db-wrh4k\" (UID: \"fe206c78-fea9-4b0e-b236-3e4bd73f1d13\") " pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.092008 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19f40110-25a0-41cb-b740-67d93659b7dc-serving-cert\") pod \"controller-manager-879f6c89f-krk6p\" (UID: \"19f40110-25a0-41cb-b740-67d93659b7dc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.092040 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76x8b\" (UniqueName: \"kubernetes.io/projected/be0d8f61-b7b4-48cf-a6cb-6780df7d99d6-kube-api-access-76x8b\") pod \"machine-api-operator-5694c8668f-xh2mk\" (UID: \"be0d8f61-b7b4-48cf-a6cb-6780df7d99d6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-xh2mk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.092067 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fe206c78-fea9-4b0e-b236-3e4bd73f1d13-trusted-ca-bundle\") pod \"console-f9d7485db-wrh4k\" (UID: \"fe206c78-fea9-4b0e-b236-3e4bd73f1d13\") " pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.092101 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fe206c78-fea9-4b0e-b236-3e4bd73f1d13-console-oauth-config\") pod \"console-f9d7485db-wrh4k\" (UID: \"fe206c78-fea9-4b0e-b236-3e4bd73f1d13\") " pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.092168 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fe206c78-fea9-4b0e-b236-3e4bd73f1d13-service-ca\") pod \"console-f9d7485db-wrh4k\" (UID: \"fe206c78-fea9-4b0e-b236-3e4bd73f1d13\") " pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.092217 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/19f40110-25a0-41cb-b740-67d93659b7dc-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-krk6p\" (UID: \"19f40110-25a0-41cb-b740-67d93659b7dc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.092353 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5pbh\" (UniqueName: \"kubernetes.io/projected/48682975-6b38-48ad-abbd-b1c89ac4bbd9-kube-api-access-l5pbh\") pod \"openshift-apiserver-operator-796bbdcf4f-gqm5h\" (UID: \"48682975-6b38-48ad-abbd-b1c89ac4bbd9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gqm5h" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.092613 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/557733ab-6df8-42b0-893e-a10f05e34f2d-serving-cert\") pod \"route-controller-manager-6576b87f9c-xx2cp\" (UID: \"557733ab-6df8-42b0-893e-a10f05e34f2d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.092681 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.092690 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be0d8f61-b7b4-48cf-a6cb-6780df7d99d6-config\") pod \"machine-api-operator-5694c8668f-xh2mk\" (UID: \"be0d8f61-b7b4-48cf-a6cb-6780df7d99d6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-xh2mk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.093251 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/be0d8f61-b7b4-48cf-a6cb-6780df7d99d6-images\") pod \"machine-api-operator-5694c8668f-xh2mk\" (UID: \"be0d8f61-b7b4-48cf-a6cb-6780df7d99d6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-xh2mk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.093604 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19f40110-25a0-41cb-b740-67d93659b7dc-config\") pod \"controller-manager-879f6c89f-krk6p\" (UID: \"19f40110-25a0-41cb-b740-67d93659b7dc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.094233 4975 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.094452 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.098143 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.098860 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.100788 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.102706 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.103235 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.103439 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.104235 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.105208 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.112390 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.112460 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.112800 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.113032 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.113202 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.116019 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.116333 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.116677 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.117014 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.117113 4975 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.117231 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.117397 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.122672 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.127555 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.133956 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.195407 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fe206c78-fea9-4b0e-b236-3e4bd73f1d13-console-oauth-config\") pod \"console-f9d7485db-wrh4k\" (UID: \"fe206c78-fea9-4b0e-b236-3e4bd73f1d13\") " pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.195457 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.195485 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.195543 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/41509c7e-a96a-44ce-a24e-dbd2f80386b0-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-g7tn2\" (UID: \"41509c7e-a96a-44ce-a24e-dbd2f80386b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.195565 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-etcd-serving-ca\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.195585 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: 
\"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.195602 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.195621 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.195640 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.195661 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fe206c78-fea9-4b0e-b236-3e4bd73f1d13-service-ca\") pod \"console-f9d7485db-wrh4k\" (UID: \"fe206c78-fea9-4b0e-b236-3e4bd73f1d13\") " pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.195680 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/19f40110-25a0-41cb-b740-67d93659b7dc-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-krk6p\" (UID: \"19f40110-25a0-41cb-b740-67d93659b7dc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.195705 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5pbh\" (UniqueName: \"kubernetes.io/projected/48682975-6b38-48ad-abbd-b1c89ac4bbd9-kube-api-access-l5pbh\") pod \"openshift-apiserver-operator-796bbdcf4f-gqm5h\" (UID: \"48682975-6b38-48ad-abbd-b1c89ac4bbd9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gqm5h" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.195727 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/030235cf-11d2-431b-92f7-c46704c420e8-config\") pod \"authentication-operator-69f744f599-b8p7w\" (UID: \"030235cf-11d2-431b-92f7-c46704c420e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-b8p7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.195768 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/557733ab-6df8-42b0-893e-a10f05e34f2d-serving-cert\") pod \"route-controller-manager-6576b87f9c-xx2cp\" (UID: 
\"557733ab-6df8-42b0-893e-a10f05e34f2d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.195788 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be0d8f61-b7b4-48cf-a6cb-6780df7d99d6-config\") pod \"machine-api-operator-5694c8668f-xh2mk\" (UID: \"be0d8f61-b7b4-48cf-a6cb-6780df7d99d6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-xh2mk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.195805 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/be0d8f61-b7b4-48cf-a6cb-6780df7d99d6-images\") pod \"machine-api-operator-5694c8668f-xh2mk\" (UID: \"be0d8f61-b7b4-48cf-a6cb-6780df7d99d6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-xh2mk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.195823 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19f40110-25a0-41cb-b740-67d93659b7dc-config\") pod \"controller-manager-879f6c89f-krk6p\" (UID: \"19f40110-25a0-41cb-b740-67d93659b7dc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.195841 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-config\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.195859 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/030235cf-11d2-431b-92f7-c46704c420e8-serving-cert\") pod \"authentication-operator-69f744f599-b8p7w\" (UID: \"030235cf-11d2-431b-92f7-c46704c420e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-b8p7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.195880 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fe206c78-fea9-4b0e-b236-3e4bd73f1d13-console-config\") pod \"console-f9d7485db-wrh4k\" (UID: \"fe206c78-fea9-4b0e-b236-3e4bd73f1d13\") " pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.195898 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48682975-6b38-48ad-abbd-b1c89ac4bbd9-config\") pod \"openshift-apiserver-operator-796bbdcf4f-gqm5h\" (UID: \"48682975-6b38-48ad-abbd-b1c89ac4bbd9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gqm5h" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.195942 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptt94\" (UniqueName: \"kubernetes.io/projected/7aada3dd-f7ae-4129-b168-366122ad0ef1-kube-api-access-ptt94\") pod \"machine-approver-56656f9798-q6z7v\" (UID: \"7aada3dd-f7ae-4129-b168-366122ad0ef1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q6z7v" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.195961 4975 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8smfs\" (UniqueName: \"kubernetes.io/projected/030235cf-11d2-431b-92f7-c46704c420e8-kube-api-access-8smfs\") pod \"authentication-operator-69f744f599-b8p7w\" (UID: \"030235cf-11d2-431b-92f7-c46704c420e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-b8p7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.195996 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196014 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/41509c7e-a96a-44ce-a24e-dbd2f80386b0-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-g7tn2\" (UID: \"41509c7e-a96a-44ce-a24e-dbd2f80386b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196030 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/030235cf-11d2-431b-92f7-c46704c420e8-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-b8p7w\" (UID: \"030235cf-11d2-431b-92f7-c46704c420e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-b8p7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196053 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64ca38fe-db66-4303-a97e-77b7fa4c6214-config\") pod \"console-operator-58897d9998-2rcjc\" (UID: \"64ca38fe-db66-4303-a97e-77b7fa4c6214\") " pod="openshift-console-operator/console-operator-58897d9998-2rcjc" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196074 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196093 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-encryption-config\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196110 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/41509c7e-a96a-44ce-a24e-dbd2f80386b0-audit-policies\") pod \"apiserver-7bbb656c7d-g7tn2\" (UID: \"41509c7e-a96a-44ce-a24e-dbd2f80386b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196130 4975 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-etcd-client\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196156 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fe206c78-fea9-4b0e-b236-3e4bd73f1d13-oauth-serving-cert\") pod \"console-f9d7485db-wrh4k\" (UID: \"fe206c78-fea9-4b0e-b236-3e4bd73f1d13\") " pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196173 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwtz2\" (UniqueName: \"kubernetes.io/projected/557733ab-6df8-42b0-893e-a10f05e34f2d-kube-api-access-nwtz2\") pod \"route-controller-manager-6576b87f9c-xx2cp\" (UID: \"557733ab-6df8-42b0-893e-a10f05e34f2d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196195 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/41509c7e-a96a-44ce-a24e-dbd2f80386b0-etcd-client\") pod \"apiserver-7bbb656c7d-g7tn2\" (UID: \"41509c7e-a96a-44ce-a24e-dbd2f80386b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196211 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64ca38fe-db66-4303-a97e-77b7fa4c6214-serving-cert\") pod \"console-operator-58897d9998-2rcjc\" (UID: \"64ca38fe-db66-4303-a97e-77b7fa4c6214\") " pod="openshift-console-operator/console-operator-58897d9998-2rcjc" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196228 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/48682975-6b38-48ad-abbd-b1c89ac4bbd9-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-gqm5h\" (UID: \"48682975-6b38-48ad-abbd-b1c89ac4bbd9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gqm5h" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196246 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196263 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/41509c7e-a96a-44ce-a24e-dbd2f80386b0-serving-cert\") pod \"apiserver-7bbb656c7d-g7tn2\" (UID: \"41509c7e-a96a-44ce-a24e-dbd2f80386b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196279 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/19f40110-25a0-41cb-b740-67d93659b7dc-client-ca\") pod 
\"controller-manager-879f6c89f-krk6p\" (UID: \"19f40110-25a0-41cb-b740-67d93659b7dc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196294 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7aada3dd-f7ae-4129-b168-366122ad0ef1-auth-proxy-config\") pod \"machine-approver-56656f9798-q6z7v\" (UID: \"7aada3dd-f7ae-4129-b168-366122ad0ef1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q6z7v" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196315 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/be0d8f61-b7b4-48cf-a6cb-6780df7d99d6-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-xh2mk\" (UID: \"be0d8f61-b7b4-48cf-a6cb-6780df7d99d6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-xh2mk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196329 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/030235cf-11d2-431b-92f7-c46704c420e8-service-ca-bundle\") pod \"authentication-operator-69f744f599-b8p7w\" (UID: \"030235cf-11d2-431b-92f7-c46704c420e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-b8p7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196346 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4376008-2a48-499e-a548-0b5f233e3af7-serving-cert\") pod \"openshift-config-operator-7777fb866f-vjg2l\" (UID: \"c4376008-2a48-499e-a548-0b5f233e3af7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vjg2l" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196365 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dg85s\" (UniqueName: \"kubernetes.io/projected/19f40110-25a0-41cb-b740-67d93659b7dc-kube-api-access-dg85s\") pod \"controller-manager-879f6c89f-krk6p\" (UID: \"19f40110-25a0-41cb-b740-67d93659b7dc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196383 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nc6x9\" (UniqueName: \"kubernetes.io/projected/c4376008-2a48-499e-a548-0b5f233e3af7-kube-api-access-nc6x9\") pod \"openshift-config-operator-7777fb866f-vjg2l\" (UID: \"c4376008-2a48-499e-a548-0b5f233e3af7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vjg2l" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196400 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/557733ab-6df8-42b0-893e-a10f05e34f2d-client-ca\") pod \"route-controller-manager-6576b87f9c-xx2cp\" (UID: \"557733ab-6df8-42b0-893e-a10f05e34f2d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196417 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9424e0a3-86c1-49b3-a3b6-f599caf06e8b-bound-sa-token\") pod 
\"cluster-image-registry-operator-dc59b4c8b-zdp7w\" (UID: \"9424e0a3-86c1-49b3-a3b6-f599caf06e8b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zdp7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196442 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196459 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fe206c78-fea9-4b0e-b236-3e4bd73f1d13-console-serving-cert\") pod \"console-f9d7485db-wrh4k\" (UID: \"fe206c78-fea9-4b0e-b236-3e4bd73f1d13\") " pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196474 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-audit-policies\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196490 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-trusted-ca-bundle\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196507 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/c4376008-2a48-499e-a548-0b5f233e3af7-available-featuregates\") pod \"openshift-config-operator-7777fb866f-vjg2l\" (UID: \"c4376008-2a48-499e-a548-0b5f233e3af7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vjg2l" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196524 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqlth\" (UniqueName: \"kubernetes.io/projected/41509c7e-a96a-44ce-a24e-dbd2f80386b0-kube-api-access-gqlth\") pod \"apiserver-7bbb656c7d-g7tn2\" (UID: \"41509c7e-a96a-44ce-a24e-dbd2f80386b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196543 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/7aada3dd-f7ae-4129-b168-366122ad0ef1-machine-approver-tls\") pod \"machine-approver-56656f9798-q6z7v\" (UID: \"7aada3dd-f7ae-4129-b168-366122ad0ef1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q6z7v" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196559 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67hdw\" (UniqueName: \"kubernetes.io/projected/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-kube-api-access-67hdw\") pod \"oauth-openshift-558db77b4-xrnhz\" 
(UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196588 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/557733ab-6df8-42b0-893e-a10f05e34f2d-config\") pod \"route-controller-manager-6576b87f9c-xx2cp\" (UID: \"557733ab-6df8-42b0-893e-a10f05e34f2d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196607 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/9424e0a3-86c1-49b3-a3b6-f599caf06e8b-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-zdp7w\" (UID: \"9424e0a3-86c1-49b3-a3b6-f599caf06e8b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zdp7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196626 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-node-pullsecrets\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196646 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9424e0a3-86c1-49b3-a3b6-f599caf06e8b-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-zdp7w\" (UID: \"9424e0a3-86c1-49b3-a3b6-f599caf06e8b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zdp7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196675 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xz2mk\" (UniqueName: \"kubernetes.io/projected/9424e0a3-86c1-49b3-a3b6-f599caf06e8b-kube-api-access-xz2mk\") pod \"cluster-image-registry-operator-dc59b4c8b-zdp7w\" (UID: \"9424e0a3-86c1-49b3-a3b6-f599caf06e8b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zdp7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196693 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-audit-dir\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196710 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-audit\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196744 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/d50d7251-c487-444f-ae08-3713d8ea5ce3-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-8r7mh\" (UID: \"d50d7251-c487-444f-ae08-3713d8ea5ce3\") " 
pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8r7mh" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196762 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzd7x\" (UniqueName: \"kubernetes.io/projected/64ca38fe-db66-4303-a97e-77b7fa4c6214-kube-api-access-qzd7x\") pod \"console-operator-58897d9998-2rcjc\" (UID: \"64ca38fe-db66-4303-a97e-77b7fa4c6214\") " pod="openshift-console-operator/console-operator-58897d9998-2rcjc" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196779 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/41509c7e-a96a-44ce-a24e-dbd2f80386b0-audit-dir\") pod \"apiserver-7bbb656c7d-g7tn2\" (UID: \"41509c7e-a96a-44ce-a24e-dbd2f80386b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196797 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-image-import-ca\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196813 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e127f059-17d0-4da8-899b-94674c0ff01f-serviceca\") pod \"image-pruner-29489760-c9d48\" (UID: \"e127f059-17d0-4da8-899b-94674c0ff01f\") " pod="openshift-image-registry/image-pruner-29489760-c9d48" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196830 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nzkg\" (UniqueName: \"kubernetes.io/projected/d50d7251-c487-444f-ae08-3713d8ea5ce3-kube-api-access-8nzkg\") pod \"cluster-samples-operator-665b6dd947-8r7mh\" (UID: \"d50d7251-c487-444f-ae08-3713d8ea5ce3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8r7mh" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196855 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/64ca38fe-db66-4303-a97e-77b7fa4c6214-trusted-ca\") pod \"console-operator-58897d9998-2rcjc\" (UID: \"64ca38fe-db66-4303-a97e-77b7fa4c6214\") " pod="openshift-console-operator/console-operator-58897d9998-2rcjc" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196873 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gk4r\" (UniqueName: \"kubernetes.io/projected/fe206c78-fea9-4b0e-b236-3e4bd73f1d13-kube-api-access-6gk4r\") pod \"console-f9d7485db-wrh4k\" (UID: \"fe206c78-fea9-4b0e-b236-3e4bd73f1d13\") " pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196889 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xlnxj\" (UniqueName: \"kubernetes.io/projected/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-kube-api-access-xlnxj\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196906 4975 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19f40110-25a0-41cb-b740-67d93659b7dc-serving-cert\") pod \"controller-manager-879f6c89f-krk6p\" (UID: \"19f40110-25a0-41cb-b740-67d93659b7dc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196924 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/41509c7e-a96a-44ce-a24e-dbd2f80386b0-encryption-config\") pod \"apiserver-7bbb656c7d-g7tn2\" (UID: \"41509c7e-a96a-44ce-a24e-dbd2f80386b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196940 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76x8b\" (UniqueName: \"kubernetes.io/projected/be0d8f61-b7b4-48cf-a6cb-6780df7d99d6-kube-api-access-76x8b\") pod \"machine-api-operator-5694c8668f-xh2mk\" (UID: \"be0d8f61-b7b4-48cf-a6cb-6780df7d99d6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-xh2mk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196956 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fe206c78-fea9-4b0e-b236-3e4bd73f1d13-trusted-ca-bundle\") pod \"console-f9d7485db-wrh4k\" (UID: \"fe206c78-fea9-4b0e-b236-3e4bd73f1d13\") " pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.196996 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-audit-dir\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.197014 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.197040 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-serving-cert\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.197055 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sf9vp\" (UniqueName: \"kubernetes.io/projected/e127f059-17d0-4da8-899b-94674c0ff01f-kube-api-access-sf9vp\") pod \"image-pruner-29489760-c9d48\" (UID: \"e127f059-17d0-4da8-899b-94674c0ff01f\") " pod="openshift-image-registry/image-pruner-29489760-c9d48" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.197073 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2smpc\" (UniqueName: 
\"kubernetes.io/projected/c1bf1b13-966b-4a74-935c-47af817d7777-kube-api-access-2smpc\") pod \"downloads-7954f5f757-cbkrr\" (UID: \"c1bf1b13-966b-4a74-935c-47af817d7777\") " pod="openshift-console/downloads-7954f5f757-cbkrr" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.197091 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7aada3dd-f7ae-4129-b168-366122ad0ef1-config\") pod \"machine-approver-56656f9798-q6z7v\" (UID: \"7aada3dd-f7ae-4129-b168-366122ad0ef1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q6z7v" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.198938 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fe206c78-fea9-4b0e-b236-3e4bd73f1d13-console-config\") pod \"console-f9d7485db-wrh4k\" (UID: \"fe206c78-fea9-4b0e-b236-3e4bd73f1d13\") " pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.199663 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48682975-6b38-48ad-abbd-b1c89ac4bbd9-config\") pod \"openshift-apiserver-operator-796bbdcf4f-gqm5h\" (UID: \"48682975-6b38-48ad-abbd-b1c89ac4bbd9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gqm5h" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.199743 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/be0d8f61-b7b4-48cf-a6cb-6780df7d99d6-config\") pod \"machine-api-operator-5694c8668f-xh2mk\" (UID: \"be0d8f61-b7b4-48cf-a6cb-6780df7d99d6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-xh2mk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.200486 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/be0d8f61-b7b4-48cf-a6cb-6780df7d99d6-images\") pod \"machine-api-operator-5694c8668f-xh2mk\" (UID: \"be0d8f61-b7b4-48cf-a6cb-6780df7d99d6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-xh2mk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.200765 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-cbkrr"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.200949 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/19f40110-25a0-41cb-b740-67d93659b7dc-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-krk6p\" (UID: \"19f40110-25a0-41cb-b740-67d93659b7dc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.201931 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19f40110-25a0-41cb-b740-67d93659b7dc-config\") pod \"controller-manager-879f6c89f-krk6p\" (UID: \"19f40110-25a0-41cb-b740-67d93659b7dc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.202397 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/c4376008-2a48-499e-a548-0b5f233e3af7-available-featuregates\") pod \"openshift-config-operator-7777fb866f-vjg2l\" (UID: 
\"c4376008-2a48-499e-a548-0b5f233e3af7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vjg2l" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.202777 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/64ca38fe-db66-4303-a97e-77b7fa4c6214-config\") pod \"console-operator-58897d9998-2rcjc\" (UID: \"64ca38fe-db66-4303-a97e-77b7fa4c6214\") " pod="openshift-console-operator/console-operator-58897d9998-2rcjc" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.203606 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fe206c78-fea9-4b0e-b236-3e4bd73f1d13-service-ca\") pod \"console-f9d7485db-wrh4k\" (UID: \"fe206c78-fea9-4b0e-b236-3e4bd73f1d13\") " pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.204073 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-pruner-29489760-c9d48"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.204581 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fe206c78-fea9-4b0e-b236-3e4bd73f1d13-trusted-ca-bundle\") pod \"console-f9d7485db-wrh4k\" (UID: \"fe206c78-fea9-4b0e-b236-3e4bd73f1d13\") " pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.204839 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/64ca38fe-db66-4303-a97e-77b7fa4c6214-trusted-ca\") pod \"console-operator-58897d9998-2rcjc\" (UID: \"64ca38fe-db66-4303-a97e-77b7fa4c6214\") " pod="openshift-console-operator/console-operator-58897d9998-2rcjc" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.206358 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/557733ab-6df8-42b0-893e-a10f05e34f2d-client-ca\") pod \"route-controller-manager-6576b87f9c-xx2cp\" (UID: \"557733ab-6df8-42b0-893e-a10f05e34f2d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.210858 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/557733ab-6df8-42b0-893e-a10f05e34f2d-config\") pod \"route-controller-manager-6576b87f9c-xx2cp\" (UID: \"557733ab-6df8-42b0-893e-a10f05e34f2d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.219880 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-krk6p"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.220340 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9424e0a3-86c1-49b3-a3b6-f599caf06e8b-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-zdp7w\" (UID: \"9424e0a3-86c1-49b3-a3b6-f599caf06e8b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zdp7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.220980 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/19f40110-25a0-41cb-b740-67d93659b7dc-client-ca\") pod 
\"controller-manager-879f6c89f-krk6p\" (UID: \"19f40110-25a0-41cb-b740-67d93659b7dc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.221385 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fe206c78-fea9-4b0e-b236-3e4bd73f1d13-oauth-serving-cert\") pod \"console-f9d7485db-wrh4k\" (UID: \"fe206c78-fea9-4b0e-b236-3e4bd73f1d13\") " pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.231052 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/557733ab-6df8-42b0-893e-a10f05e34f2d-serving-cert\") pod \"route-controller-manager-6576b87f9c-xx2cp\" (UID: \"557733ab-6df8-42b0-893e-a10f05e34f2d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.232154 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/9424e0a3-86c1-49b3-a3b6-f599caf06e8b-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-zdp7w\" (UID: \"9424e0a3-86c1-49b3-a3b6-f599caf06e8b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zdp7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.232559 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5pbh\" (UniqueName: \"kubernetes.io/projected/48682975-6b38-48ad-abbd-b1c89ac4bbd9-kube-api-access-l5pbh\") pod \"openshift-apiserver-operator-796bbdcf4f-gqm5h\" (UID: \"48682975-6b38-48ad-abbd-b1c89ac4bbd9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gqm5h" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.239399 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fe206c78-fea9-4b0e-b236-3e4bd73f1d13-console-serving-cert\") pod \"console-f9d7485db-wrh4k\" (UID: \"fe206c78-fea9-4b0e-b236-3e4bd73f1d13\") " pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.239585 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gqm5h"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.241008 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dg85s\" (UniqueName: \"kubernetes.io/projected/19f40110-25a0-41cb-b740-67d93659b7dc-kube-api-access-dg85s\") pod \"controller-manager-879f6c89f-krk6p\" (UID: \"19f40110-25a0-41cb-b740-67d93659b7dc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.241394 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/d50d7251-c487-444f-ae08-3713d8ea5ce3-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-8r7mh\" (UID: \"d50d7251-c487-444f-ae08-3713d8ea5ce3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8r7mh" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.242812 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/be0d8f61-b7b4-48cf-a6cb-6780df7d99d6-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-xh2mk\" (UID: \"be0d8f61-b7b4-48cf-a6cb-6780df7d99d6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-xh2mk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.244329 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/64ca38fe-db66-4303-a97e-77b7fa4c6214-serving-cert\") pod \"console-operator-58897d9998-2rcjc\" (UID: \"64ca38fe-db66-4303-a97e-77b7fa4c6214\") " pod="openshift-console-operator/console-operator-58897d9998-2rcjc" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.245256 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/48682975-6b38-48ad-abbd-b1c89ac4bbd9-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-gqm5h\" (UID: \"48682975-6b38-48ad-abbd-b1c89ac4bbd9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gqm5h" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.251305 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fe206c78-fea9-4b0e-b236-3e4bd73f1d13-console-oauth-config\") pod \"console-f9d7485db-wrh4k\" (UID: \"fe206c78-fea9-4b0e-b236-3e4bd73f1d13\") " pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.254248 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m8qff"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.254992 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m8qff" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.257850 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-b4hpk"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.260822 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-9hgnx"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.261856 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c4376008-2a48-499e-a548-0b5f233e3af7-serving-cert\") pod \"openshift-config-operator-7777fb866f-vjg2l\" (UID: \"c4376008-2a48-499e-a548-0b5f233e3af7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vjg2l" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.271898 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-b4hpk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.272706 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nzkg\" (UniqueName: \"kubernetes.io/projected/d50d7251-c487-444f-ae08-3713d8ea5ce3-kube-api-access-8nzkg\") pod \"cluster-samples-operator-665b6dd947-8r7mh\" (UID: \"d50d7251-c487-444f-ae08-3713d8ea5ce3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8r7mh" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.285441 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vd6qz"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.287444 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19f40110-25a0-41cb-b740-67d93659b7dc-serving-cert\") pod \"controller-manager-879f6c89f-krk6p\" (UID: \"19f40110-25a0-41cb-b740-67d93659b7dc\") " pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.288050 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzd7x\" (UniqueName: \"kubernetes.io/projected/64ca38fe-db66-4303-a97e-77b7fa4c6214-kube-api-access-qzd7x\") pod \"console-operator-58897d9998-2rcjc\" (UID: \"64ca38fe-db66-4303-a97e-77b7fa4c6214\") " pod="openshift-console-operator/console-operator-58897d9998-2rcjc" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.289027 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-9hgnx" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.291556 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gk4r\" (UniqueName: \"kubernetes.io/projected/fe206c78-fea9-4b0e-b236-3e4bd73f1d13-kube-api-access-6gk4r\") pod \"console-f9d7485db-wrh4k\" (UID: \"fe206c78-fea9-4b0e-b236-3e4bd73f1d13\") " pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.296345 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76x8b\" (UniqueName: \"kubernetes.io/projected/be0d8f61-b7b4-48cf-a6cb-6780df7d99d6-kube-api-access-76x8b\") pod \"machine-api-operator-5694c8668f-xh2mk\" (UID: \"be0d8f61-b7b4-48cf-a6cb-6780df7d99d6\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-xh2mk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.297310 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-6hw84"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.298103 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-6hw84" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.298444 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.299550 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.299591 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/41509c7e-a96a-44ce-a24e-dbd2f80386b0-serving-cert\") pod \"apiserver-7bbb656c7d-g7tn2\" (UID: \"41509c7e-a96a-44ce-a24e-dbd2f80386b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.299624 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bc715eca-0758-4aa8-90e6-59b28717a44c-config-volume\") pod \"dns-default-9hgnx\" (UID: \"bc715eca-0758-4aa8-90e6-59b28717a44c\") " pod="openshift-dns/dns-default-9hgnx" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.299671 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7aada3dd-f7ae-4129-b168-366122ad0ef1-auth-proxy-config\") pod \"machine-approver-56656f9798-q6z7v\" (UID: \"7aada3dd-f7ae-4129-b168-366122ad0ef1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q6z7v" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.299703 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/030235cf-11d2-431b-92f7-c46704c420e8-service-ca-bundle\") pod \"authentication-operator-69f744f599-b8p7w\" (UID: \"030235cf-11d2-431b-92f7-c46704c420e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-b8p7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.300012 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gj7kf\" (UniqueName: \"kubernetes.io/projected/51627ad8-bcac-4c0e-934d-b99aa94b87ca-kube-api-access-gj7kf\") pod \"etcd-operator-b45778765-b4hpk\" (UID: \"51627ad8-bcac-4c0e-934d-b99aa94b87ca\") " pod="openshift-etcd-operator/etcd-operator-b45778765-b4hpk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.300187 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-audit-policies\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.300218 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.300252 4975 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5w2s7\" (UniqueName: \"kubernetes.io/projected/bc715eca-0758-4aa8-90e6-59b28717a44c-kube-api-access-5w2s7\") pod \"dns-default-9hgnx\" (UID: \"bc715eca-0758-4aa8-90e6-59b28717a44c\") " pod="openshift-dns/dns-default-9hgnx" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.300285 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqlth\" (UniqueName: \"kubernetes.io/projected/41509c7e-a96a-44ce-a24e-dbd2f80386b0-kube-api-access-gqlth\") pod \"apiserver-7bbb656c7d-g7tn2\" (UID: \"41509c7e-a96a-44ce-a24e-dbd2f80386b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.300316 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-trusted-ca-bundle\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.300351 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/7aada3dd-f7ae-4129-b168-366122ad0ef1-machine-approver-tls\") pod \"machine-approver-56656f9798-q6z7v\" (UID: \"7aada3dd-f7ae-4129-b168-366122ad0ef1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q6z7v" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.300390 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67hdw\" (UniqueName: \"kubernetes.io/projected/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-kube-api-access-67hdw\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.300430 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51627ad8-bcac-4c0e-934d-b99aa94b87ca-config\") pod \"etcd-operator-b45778765-b4hpk\" (UID: \"51627ad8-bcac-4c0e-934d-b99aa94b87ca\") " pod="openshift-etcd-operator/etcd-operator-b45778765-b4hpk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.300460 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/bc715eca-0758-4aa8-90e6-59b28717a44c-metrics-tls\") pod \"dns-default-9hgnx\" (UID: \"bc715eca-0758-4aa8-90e6-59b28717a44c\") " pod="openshift-dns/dns-default-9hgnx" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.300482 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/51627ad8-bcac-4c0e-934d-b99aa94b87ca-etcd-service-ca\") pod \"etcd-operator-b45778765-b4hpk\" (UID: \"51627ad8-bcac-4c0e-934d-b99aa94b87ca\") " pod="openshift-etcd-operator/etcd-operator-b45778765-b4hpk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.300510 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-node-pullsecrets\") pod \"apiserver-76f77b778f-xmsj9\" (UID: 
\"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.300551 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-audit-dir\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.300634 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-audit\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.300673 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/41509c7e-a96a-44ce-a24e-dbd2f80386b0-audit-dir\") pod \"apiserver-7bbb656c7d-g7tn2\" (UID: \"41509c7e-a96a-44ce-a24e-dbd2f80386b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.300703 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-image-import-ca\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.301614 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e127f059-17d0-4da8-899b-94674c0ff01f-serviceca\") pod \"image-pruner-29489760-c9d48\" (UID: \"e127f059-17d0-4da8-899b-94674c0ff01f\") " pod="openshift-image-registry/image-pruner-29489760-c9d48" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.301863 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/51627ad8-bcac-4c0e-934d-b99aa94b87ca-etcd-client\") pod \"etcd-operator-b45778765-b4hpk\" (UID: \"51627ad8-bcac-4c0e-934d-b99aa94b87ca\") " pod="openshift-etcd-operator/etcd-operator-b45778765-b4hpk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.302015 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42a9c706-06f8-489f-8b1a-769b1101a2d7-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-m8qff\" (UID: \"42a9c706-06f8-489f-8b1a-769b1101a2d7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m8qff" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.302127 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/41509c7e-a96a-44ce-a24e-dbd2f80386b0-encryption-config\") pod \"apiserver-7bbb656c7d-g7tn2\" (UID: \"41509c7e-a96a-44ce-a24e-dbd2f80386b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.302212 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xlnxj\" (UniqueName: 
\"kubernetes.io/projected/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-kube-api-access-xlnxj\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.302312 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-audit-dir\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.302394 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-serving-cert\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.302511 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/42a9c706-06f8-489f-8b1a-769b1101a2d7-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-m8qff\" (UID: \"42a9c706-06f8-489f-8b1a-769b1101a2d7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m8qff" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.302619 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.302713 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sf9vp\" (UniqueName: \"kubernetes.io/projected/e127f059-17d0-4da8-899b-94674c0ff01f-kube-api-access-sf9vp\") pod \"image-pruner-29489760-c9d48\" (UID: \"e127f059-17d0-4da8-899b-94674c0ff01f\") " pod="openshift-image-registry/image-pruner-29489760-c9d48" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.302824 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2smpc\" (UniqueName: \"kubernetes.io/projected/c1bf1b13-966b-4a74-935c-47af817d7777-kube-api-access-2smpc\") pod \"downloads-7954f5f757-cbkrr\" (UID: \"c1bf1b13-966b-4a74-935c-47af817d7777\") " pod="openshift-console/downloads-7954f5f757-cbkrr" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.302902 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/51627ad8-bcac-4c0e-934d-b99aa94b87ca-serving-cert\") pod \"etcd-operator-b45778765-b4hpk\" (UID: \"51627ad8-bcac-4c0e-934d-b99aa94b87ca\") " pod="openshift-etcd-operator/etcd-operator-b45778765-b4hpk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.302983 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7aada3dd-f7ae-4129-b168-366122ad0ef1-config\") pod \"machine-approver-56656f9798-q6z7v\" (UID: \"7aada3dd-f7ae-4129-b168-366122ad0ef1\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q6z7v" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.303092 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.303147 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-audit\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.303217 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/41509c7e-a96a-44ce-a24e-dbd2f80386b0-audit-dir\") pod \"apiserver-7bbb656c7d-g7tn2\" (UID: \"41509c7e-a96a-44ce-a24e-dbd2f80386b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.303293 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.303368 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.303447 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/41509c7e-a96a-44ce-a24e-dbd2f80386b0-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-g7tn2\" (UID: \"41509c7e-a96a-44ce-a24e-dbd2f80386b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.303520 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-etcd-serving-ca\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.303615 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.303718 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.303820 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.303901 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/030235cf-11d2-431b-92f7-c46704c420e8-config\") pod \"authentication-operator-69f744f599-b8p7w\" (UID: \"030235cf-11d2-431b-92f7-c46704c420e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-b8p7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.304010 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-config\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.304125 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/030235cf-11d2-431b-92f7-c46704c420e8-serving-cert\") pod \"authentication-operator-69f744f599-b8p7w\" (UID: \"030235cf-11d2-431b-92f7-c46704c420e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-b8p7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.304209 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8smfs\" (UniqueName: \"kubernetes.io/projected/030235cf-11d2-431b-92f7-c46704c420e8-kube-api-access-8smfs\") pod \"authentication-operator-69f744f599-b8p7w\" (UID: \"030235cf-11d2-431b-92f7-c46704c420e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-b8p7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.304314 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/51627ad8-bcac-4c0e-934d-b99aa94b87ca-etcd-ca\") pod \"etcd-operator-b45778765-b4hpk\" (UID: \"51627ad8-bcac-4c0e-934d-b99aa94b87ca\") " pod="openshift-etcd-operator/etcd-operator-b45778765-b4hpk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.304450 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptt94\" (UniqueName: \"kubernetes.io/projected/7aada3dd-f7ae-4129-b168-366122ad0ef1-kube-api-access-ptt94\") pod \"machine-approver-56656f9798-q6z7v\" (UID: \"7aada3dd-f7ae-4129-b168-366122ad0ef1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q6z7v" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.304637 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.304748 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/41509c7e-a96a-44ce-a24e-dbd2f80386b0-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-g7tn2\" (UID: \"41509c7e-a96a-44ce-a24e-dbd2f80386b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.304849 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/030235cf-11d2-431b-92f7-c46704c420e8-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-b8p7w\" (UID: \"030235cf-11d2-431b-92f7-c46704c420e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-b8p7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.304939 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-encryption-config\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.306618 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/7aada3dd-f7ae-4129-b168-366122ad0ef1-machine-approver-tls\") pod \"machine-approver-56656f9798-q6z7v\" (UID: \"7aada3dd-f7ae-4129-b168-366122ad0ef1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q6z7v" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.302353 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-node-pullsecrets\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.307192 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-audit-dir\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.307710 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.303931 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-image-import-ca\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc 
kubenswrapper[4975]: I0126 00:09:18.300507 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-xnxsr"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.302392 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-audit-dir\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.304432 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nc6x9\" (UniqueName: \"kubernetes.io/projected/c4376008-2a48-499e-a548-0b5f233e3af7-kube-api-access-nc6x9\") pod \"openshift-config-operator-7777fb866f-vjg2l\" (UID: \"c4376008-2a48-499e-a548-0b5f233e3af7\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vjg2l" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.309644 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-config\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.310143 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e127f059-17d0-4da8-899b-94674c0ff01f-serviceca\") pod \"image-pruner-29489760-c9d48\" (UID: \"e127f059-17d0-4da8-899b-94674c0ff01f\") " pod="openshift-image-registry/image-pruner-29489760-c9d48" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.310651 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/41509c7e-a96a-44ce-a24e-dbd2f80386b0-serving-cert\") pod \"apiserver-7bbb656c7d-g7tn2\" (UID: \"41509c7e-a96a-44ce-a24e-dbd2f80386b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.310979 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9424e0a3-86c1-49b3-a3b6-f599caf06e8b-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-zdp7w\" (UID: \"9424e0a3-86c1-49b3-a3b6-f599caf06e8b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zdp7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.311484 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.312267 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/41509c7e-a96a-44ce-a24e-dbd2f80386b0-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-g7tn2\" (UID: \"41509c7e-a96a-44ce-a24e-dbd2f80386b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.313032 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-xnxsr" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.314437 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.314578 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/41509c7e-a96a-44ce-a24e-dbd2f80386b0-audit-policies\") pod \"apiserver-7bbb656c7d-g7tn2\" (UID: \"41509c7e-a96a-44ce-a24e-dbd2f80386b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.314680 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-etcd-client\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.314804 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnzrn\" (UniqueName: \"kubernetes.io/projected/42a9c706-06f8-489f-8b1a-769b1101a2d7-kube-api-access-qnzrn\") pod \"openshift-controller-manager-operator-756b6f6bc6-m8qff\" (UID: \"42a9c706-06f8-489f-8b1a-769b1101a2d7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m8qff" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.314920 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/41509c7e-a96a-44ce-a24e-dbd2f80386b0-etcd-client\") pod \"apiserver-7bbb656c7d-g7tn2\" (UID: \"41509c7e-a96a-44ce-a24e-dbd2f80386b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.315969 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7aada3dd-f7ae-4129-b168-366122ad0ef1-config\") pod \"machine-approver-56656f9798-q6z7v\" (UID: \"7aada3dd-f7ae-4129-b168-366122ad0ef1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q6z7v" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.317498 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/030235cf-11d2-431b-92f7-c46704c420e8-config\") pod \"authentication-operator-69f744f599-b8p7w\" (UID: \"030235cf-11d2-431b-92f7-c46704c420e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-b8p7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.318274 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-audit-policies\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.319139 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-etcd-serving-ca\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.319149 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.319365 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/030235cf-11d2-431b-92f7-c46704c420e8-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-b8p7w\" (UID: \"030235cf-11d2-431b-92f7-c46704c420e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-b8p7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.319375 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7aada3dd-f7ae-4129-b168-366122ad0ef1-auth-proxy-config\") pod \"machine-approver-56656f9798-q6z7v\" (UID: \"7aada3dd-f7ae-4129-b168-366122ad0ef1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q6z7v" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.319793 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/030235cf-11d2-431b-92f7-c46704c420e8-serving-cert\") pod \"authentication-operator-69f744f599-b8p7w\" (UID: \"030235cf-11d2-431b-92f7-c46704c420e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-b8p7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.319999 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-trusted-ca-bundle\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.320024 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/41509c7e-a96a-44ce-a24e-dbd2f80386b0-audit-policies\") pod \"apiserver-7bbb656c7d-g7tn2\" (UID: \"41509c7e-a96a-44ce-a24e-dbd2f80386b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.321217 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwtz2\" (UniqueName: \"kubernetes.io/projected/557733ab-6df8-42b0-893e-a10f05e34f2d-kube-api-access-nwtz2\") pod \"route-controller-manager-6576b87f9c-xx2cp\" (UID: \"557733ab-6df8-42b0-893e-a10f05e34f2d\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.321298 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-d8v59"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.321468 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/030235cf-11d2-431b-92f7-c46704c420e8-service-ca-bundle\") pod \"authentication-operator-69f744f599-b8p7w\" (UID: \"030235cf-11d2-431b-92f7-c46704c420e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-b8p7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.323723 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-encryption-config\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.326329 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8r7mh"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.326481 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-d8v59" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.328050 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-serving-cert\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.329400 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.330480 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/41509c7e-a96a-44ce-a24e-dbd2f80386b0-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-g7tn2\" (UID: \"41509c7e-a96a-44ce-a24e-dbd2f80386b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.331147 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.333398 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-65wb9"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.334880 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/41509c7e-a96a-44ce-a24e-dbd2f80386b0-etcd-client\") pod \"apiserver-7bbb656c7d-g7tn2\" (UID: \"41509c7e-a96a-44ce-a24e-dbd2f80386b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.338245 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/41509c7e-a96a-44ce-a24e-dbd2f80386b0-encryption-config\") pod \"apiserver-7bbb656c7d-g7tn2\" (UID: \"41509c7e-a96a-44ce-a24e-dbd2f80386b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 
00:09:18.338862 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.344897 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.345321 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.345319 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-etcd-client\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.346174 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.347597 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-xh2mk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.348050 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.348323 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.348897 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xz2mk\" (UniqueName: \"kubernetes.io/projected/9424e0a3-86c1-49b3-a3b6-f599caf06e8b-kube-api-access-xz2mk\") pod \"cluster-image-registry-operator-dc59b4c8b-zdp7w\" (UID: \"9424e0a3-86c1-49b3-a3b6-f599caf06e8b\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zdp7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.349902 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.351285 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zdp7w"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.351874 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-65wb9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.353235 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8c46q"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.354430 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-wrh4k"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.354664 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8c46q" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.355208 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xrnhz"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.357249 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9jj5"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.358009 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9jj5" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.360676 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.361065 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zdfmk"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.361346 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.361601 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8xrbd"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.362449 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zdfmk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.371758 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pdn2l"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.371830 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.372486 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-gqttk"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.373588 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vjg2l" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.373783 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-vfc7w"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.374098 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gqttk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.374765 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-zjr5q"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.375020 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-vfc7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.376515 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cctnf"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.376881 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-8mddw"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.377421 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-xmsj9"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.377450 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-j6df8"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.377862 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-j6df8" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.377948 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ts2pk"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.378076 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-8mddw" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.378172 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjr5q" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.378334 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cctnf" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.378512 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pdn2l" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.379337 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ts2pk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.379614 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-gbxpn"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.380508 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.381339 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-gbxpn" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.381584 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-fpdjm"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.382237 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fpdjm" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.383402 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29489760-6nbzp"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.384147 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29489760-6nbzp" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.384899 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-8dkw2"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.385421 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8dkw2" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.386782 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-l9vhw"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.388187 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-l9vhw" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.388812 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-b8p7w"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.390350 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-xnxsr"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.391888 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m8qff"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.394136 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-9hgnx"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.394376 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-2rcjc" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.395220 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-tb9s5"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.395788 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-tb9s5" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.396782 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-b4hpk"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.398698 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9jj5"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.399548 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.400227 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8c46q"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.402764 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pdn2l"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.404190 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-65wb9"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.405253 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-d8v59"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.405390 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.407017 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vd6qz"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.408872 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cctnf"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.410555 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zdfmk"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.411549 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-gqttk"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.413809 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-zjr5q"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.413858 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-dsbk6"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.419898 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/51627ad8-bcac-4c0e-934d-b99aa94b87ca-etcd-service-ca\") pod \"etcd-operator-b45778765-b4hpk\" (UID: \"51627ad8-bcac-4c0e-934d-b99aa94b87ca\") " pod="openshift-etcd-operator/etcd-operator-b45778765-b4hpk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.419986 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2z8l\" (UniqueName: 
\"kubernetes.io/projected/ab6d30fc-43fe-46af-8d7c-e67f3b3811d2-kube-api-access-c2z8l\") pod \"router-default-5444994796-6hw84\" (UID: \"ab6d30fc-43fe-46af-8d7c-e67f3b3811d2\") " pod="openshift-ingress/router-default-5444994796-6hw84" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.420104 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79xpr\" (UniqueName: \"kubernetes.io/projected/c08c3cbe-5ec9-4aaa-a74a-bccfb6a427fa-kube-api-access-79xpr\") pod \"dns-operator-744455d44c-xnxsr\" (UID: \"c08c3cbe-5ec9-4aaa-a74a-bccfb6a427fa\") " pod="openshift-dns-operator/dns-operator-744455d44c-xnxsr" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.420160 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ab6d30fc-43fe-46af-8d7c-e67f3b3811d2-service-ca-bundle\") pod \"router-default-5444994796-6hw84\" (UID: \"ab6d30fc-43fe-46af-8d7c-e67f3b3811d2\") " pod="openshift-ingress/router-default-5444994796-6hw84" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.420239 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/ab6d30fc-43fe-46af-8d7c-e67f3b3811d2-stats-auth\") pod \"router-default-5444994796-6hw84\" (UID: \"ab6d30fc-43fe-46af-8d7c-e67f3b3811d2\") " pod="openshift-ingress/router-default-5444994796-6hw84" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.420268 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ea760b21-fb4c-4e8a-9d07-094885818cac-certs\") pod \"machine-config-server-gbxpn\" (UID: \"ea760b21-fb4c-4e8a-9d07-094885818cac\") " pod="openshift-machine-config-operator/machine-config-server-gbxpn" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.420319 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znvql\" (UniqueName: \"kubernetes.io/projected/ea760b21-fb4c-4e8a-9d07-094885818cac-kube-api-access-znvql\") pod \"machine-config-server-gbxpn\" (UID: \"ea760b21-fb4c-4e8a-9d07-094885818cac\") " pod="openshift-machine-config-operator/machine-config-server-gbxpn" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.420396 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/51627ad8-bcac-4c0e-934d-b99aa94b87ca-etcd-client\") pod \"etcd-operator-b45778765-b4hpk\" (UID: \"51627ad8-bcac-4c0e-934d-b99aa94b87ca\") " pod="openshift-etcd-operator/etcd-operator-b45778765-b4hpk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.420464 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b5e1ff61-8c29-474a-bf64-f9a09b1d166e-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-65wb9\" (UID: \"b5e1ff61-8c29-474a-bf64-f9a09b1d166e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-65wb9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.420498 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ab6d30fc-43fe-46af-8d7c-e67f3b3811d2-metrics-certs\") pod \"router-default-5444994796-6hw84\" (UID: 
\"ab6d30fc-43fe-46af-8d7c-e67f3b3811d2\") " pod="openshift-ingress/router-default-5444994796-6hw84" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.420574 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42a9c706-06f8-489f-8b1a-769b1101a2d7-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-m8qff\" (UID: \"42a9c706-06f8-489f-8b1a-769b1101a2d7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m8qff" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.420627 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwnwm\" (UniqueName: \"kubernetes.io/projected/254f6b75-68d4-421f-81a4-1d78e32db94b-kube-api-access-zwnwm\") pod \"olm-operator-6b444d44fb-zdfmk\" (UID: \"254f6b75-68d4-421f-81a4-1d78e32db94b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zdfmk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.420759 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/42a9c706-06f8-489f-8b1a-769b1101a2d7-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-m8qff\" (UID: \"42a9c706-06f8-489f-8b1a-769b1101a2d7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m8qff" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.420813 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/51627ad8-bcac-4c0e-934d-b99aa94b87ca-serving-cert\") pod \"etcd-operator-b45778765-b4hpk\" (UID: \"51627ad8-bcac-4c0e-934d-b99aa94b87ca\") " pod="openshift-etcd-operator/etcd-operator-b45778765-b4hpk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.421011 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/254f6b75-68d4-421f-81a4-1d78e32db94b-profile-collector-cert\") pod \"olm-operator-6b444d44fb-zdfmk\" (UID: \"254f6b75-68d4-421f-81a4-1d78e32db94b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zdfmk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.421083 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ea760b21-fb4c-4e8a-9d07-094885818cac-node-bootstrap-token\") pod \"machine-config-server-gbxpn\" (UID: \"ea760b21-fb4c-4e8a-9d07-094885818cac\") " pod="openshift-machine-config-operator/machine-config-server-gbxpn" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.421126 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/51627ad8-bcac-4c0e-934d-b99aa94b87ca-etcd-ca\") pod \"etcd-operator-b45778765-b4hpk\" (UID: \"51627ad8-bcac-4c0e-934d-b99aa94b87ca\") " pod="openshift-etcd-operator/etcd-operator-b45778765-b4hpk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.421151 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/ab6d30fc-43fe-46af-8d7c-e67f3b3811d2-default-certificate\") pod \"router-default-5444994796-6hw84\" (UID: \"ab6d30fc-43fe-46af-8d7c-e67f3b3811d2\") " 
pod="openshift-ingress/router-default-5444994796-6hw84" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.421260 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b5e1ff61-8c29-474a-bf64-f9a09b1d166e-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-65wb9\" (UID: \"b5e1ff61-8c29-474a-bf64-f9a09b1d166e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-65wb9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.421346 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnzrn\" (UniqueName: \"kubernetes.io/projected/42a9c706-06f8-489f-8b1a-769b1101a2d7-kube-api-access-qnzrn\") pod \"openshift-controller-manager-operator-756b6f6bc6-m8qff\" (UID: \"42a9c706-06f8-489f-8b1a-769b1101a2d7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m8qff" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.421396 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c08c3cbe-5ec9-4aaa-a74a-bccfb6a427fa-metrics-tls\") pod \"dns-operator-744455d44c-xnxsr\" (UID: \"c08c3cbe-5ec9-4aaa-a74a-bccfb6a427fa\") " pod="openshift-dns-operator/dns-operator-744455d44c-xnxsr" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.421438 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bc715eca-0758-4aa8-90e6-59b28717a44c-config-volume\") pod \"dns-default-9hgnx\" (UID: \"bc715eca-0758-4aa8-90e6-59b28717a44c\") " pod="openshift-dns/dns-default-9hgnx" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.421492 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gj7kf\" (UniqueName: \"kubernetes.io/projected/51627ad8-bcac-4c0e-934d-b99aa94b87ca-kube-api-access-gj7kf\") pod \"etcd-operator-b45778765-b4hpk\" (UID: \"51627ad8-bcac-4c0e-934d-b99aa94b87ca\") " pod="openshift-etcd-operator/etcd-operator-b45778765-b4hpk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.421563 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5w2s7\" (UniqueName: \"kubernetes.io/projected/bc715eca-0758-4aa8-90e6-59b28717a44c-kube-api-access-5w2s7\") pod \"dns-default-9hgnx\" (UID: \"bc715eca-0758-4aa8-90e6-59b28717a44c\") " pod="openshift-dns/dns-default-9hgnx" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.421613 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/254f6b75-68d4-421f-81a4-1d78e32db94b-srv-cert\") pod \"olm-operator-6b444d44fb-zdfmk\" (UID: \"254f6b75-68d4-421f-81a4-1d78e32db94b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zdfmk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.421661 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51627ad8-bcac-4c0e-934d-b99aa94b87ca-config\") pod \"etcd-operator-b45778765-b4hpk\" (UID: \"51627ad8-bcac-4c0e-934d-b99aa94b87ca\") " pod="openshift-etcd-operator/etcd-operator-b45778765-b4hpk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.421684 4975 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/bc715eca-0758-4aa8-90e6-59b28717a44c-metrics-tls\") pod \"dns-default-9hgnx\" (UID: \"bc715eca-0758-4aa8-90e6-59b28717a44c\") " pod="openshift-dns/dns-default-9hgnx" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.421710 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b5e1ff61-8c29-474a-bf64-f9a09b1d166e-config\") pod \"kube-controller-manager-operator-78b949d7b-65wb9\" (UID: \"b5e1ff61-8c29-474a-bf64-f9a09b1d166e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-65wb9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.422114 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-fpdjm"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.422288 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.425085 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42a9c706-06f8-489f-8b1a-769b1101a2d7-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-m8qff\" (UID: \"42a9c706-06f8-489f-8b1a-769b1101a2d7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m8qff" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.426725 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-8dkw2"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.429044 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zdp7w" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.434057 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8r7mh" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.435396 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/42a9c706-06f8-489f-8b1a-769b1101a2d7-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-m8qff\" (UID: \"42a9c706-06f8-489f-8b1a-769b1101a2d7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m8qff" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.438332 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.439004 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.439304 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.444539 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ts2pk"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.449181 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-j6df8"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.452424 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-8mddw"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.452458 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8xrbd"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.455551 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gqm5h" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.456066 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-vfc7w"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.457750 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-l9vhw"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.458929 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29489760-6nbzp"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.459031 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.459907 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.460970 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-tb9s5"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.462220 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-dsbk6"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.467419 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51627ad8-bcac-4c0e-934d-b99aa94b87ca-config\") pod \"etcd-operator-b45778765-b4hpk\" (UID: \"51627ad8-bcac-4c0e-934d-b99aa94b87ca\") " pod="openshift-etcd-operator/etcd-operator-b45778765-b4hpk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.480852 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.499879 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.515077 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/51627ad8-bcac-4c0e-934d-b99aa94b87ca-serving-cert\") pod \"etcd-operator-b45778765-b4hpk\" (UID: \"51627ad8-bcac-4c0e-934d-b99aa94b87ca\") " pod="openshift-etcd-operator/etcd-operator-b45778765-b4hpk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.519789 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.523366 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b5e1ff61-8c29-474a-bf64-f9a09b1d166e-config\") pod \"kube-controller-manager-operator-78b949d7b-65wb9\" (UID: \"b5e1ff61-8c29-474a-bf64-f9a09b1d166e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-65wb9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.523424 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2z8l\" (UniqueName: \"kubernetes.io/projected/ab6d30fc-43fe-46af-8d7c-e67f3b3811d2-kube-api-access-c2z8l\") pod \"router-default-5444994796-6hw84\" (UID: \"ab6d30fc-43fe-46af-8d7c-e67f3b3811d2\") " pod="openshift-ingress/router-default-5444994796-6hw84" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.523462 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79xpr\" (UniqueName: \"kubernetes.io/projected/c08c3cbe-5ec9-4aaa-a74a-bccfb6a427fa-kube-api-access-79xpr\") pod \"dns-operator-744455d44c-xnxsr\" (UID: \"c08c3cbe-5ec9-4aaa-a74a-bccfb6a427fa\") " pod="openshift-dns-operator/dns-operator-744455d44c-xnxsr" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.523484 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ab6d30fc-43fe-46af-8d7c-e67f3b3811d2-service-ca-bundle\") pod \"router-default-5444994796-6hw84\" (UID: 
\"ab6d30fc-43fe-46af-8d7c-e67f3b3811d2\") " pod="openshift-ingress/router-default-5444994796-6hw84" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.523507 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/ab6d30fc-43fe-46af-8d7c-e67f3b3811d2-stats-auth\") pod \"router-default-5444994796-6hw84\" (UID: \"ab6d30fc-43fe-46af-8d7c-e67f3b3811d2\") " pod="openshift-ingress/router-default-5444994796-6hw84" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.523527 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ea760b21-fb4c-4e8a-9d07-094885818cac-certs\") pod \"machine-config-server-gbxpn\" (UID: \"ea760b21-fb4c-4e8a-9d07-094885818cac\") " pod="openshift-machine-config-operator/machine-config-server-gbxpn" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.523550 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znvql\" (UniqueName: \"kubernetes.io/projected/ea760b21-fb4c-4e8a-9d07-094885818cac-kube-api-access-znvql\") pod \"machine-config-server-gbxpn\" (UID: \"ea760b21-fb4c-4e8a-9d07-094885818cac\") " pod="openshift-machine-config-operator/machine-config-server-gbxpn" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.523576 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b5e1ff61-8c29-474a-bf64-f9a09b1d166e-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-65wb9\" (UID: \"b5e1ff61-8c29-474a-bf64-f9a09b1d166e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-65wb9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.523602 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ab6d30fc-43fe-46af-8d7c-e67f3b3811d2-metrics-certs\") pod \"router-default-5444994796-6hw84\" (UID: \"ab6d30fc-43fe-46af-8d7c-e67f3b3811d2\") " pod="openshift-ingress/router-default-5444994796-6hw84" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.523649 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwnwm\" (UniqueName: \"kubernetes.io/projected/254f6b75-68d4-421f-81a4-1d78e32db94b-kube-api-access-zwnwm\") pod \"olm-operator-6b444d44fb-zdfmk\" (UID: \"254f6b75-68d4-421f-81a4-1d78e32db94b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zdfmk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.523764 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/254f6b75-68d4-421f-81a4-1d78e32db94b-profile-collector-cert\") pod \"olm-operator-6b444d44fb-zdfmk\" (UID: \"254f6b75-68d4-421f-81a4-1d78e32db94b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zdfmk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.523820 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ea760b21-fb4c-4e8a-9d07-094885818cac-node-bootstrap-token\") pod \"machine-config-server-gbxpn\" (UID: \"ea760b21-fb4c-4e8a-9d07-094885818cac\") " pod="openshift-machine-config-operator/machine-config-server-gbxpn" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.523869 4975 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/ab6d30fc-43fe-46af-8d7c-e67f3b3811d2-default-certificate\") pod \"router-default-5444994796-6hw84\" (UID: \"ab6d30fc-43fe-46af-8d7c-e67f3b3811d2\") " pod="openshift-ingress/router-default-5444994796-6hw84" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.523894 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b5e1ff61-8c29-474a-bf64-f9a09b1d166e-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-65wb9\" (UID: \"b5e1ff61-8c29-474a-bf64-f9a09b1d166e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-65wb9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.523929 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c08c3cbe-5ec9-4aaa-a74a-bccfb6a427fa-metrics-tls\") pod \"dns-operator-744455d44c-xnxsr\" (UID: \"c08c3cbe-5ec9-4aaa-a74a-bccfb6a427fa\") " pod="openshift-dns-operator/dns-operator-744455d44c-xnxsr" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.524001 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/254f6b75-68d4-421f-81a4-1d78e32db94b-srv-cert\") pod \"olm-operator-6b444d44fb-zdfmk\" (UID: \"254f6b75-68d4-421f-81a4-1d78e32db94b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zdfmk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.527240 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/51627ad8-bcac-4c0e-934d-b99aa94b87ca-etcd-client\") pod \"etcd-operator-b45778765-b4hpk\" (UID: \"51627ad8-bcac-4c0e-934d-b99aa94b87ca\") " pod="openshift-etcd-operator/etcd-operator-b45778765-b4hpk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.541147 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.544023 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/51627ad8-bcac-4c0e-934d-b99aa94b87ca-etcd-ca\") pod \"etcd-operator-b45778765-b4hpk\" (UID: \"51627ad8-bcac-4c0e-934d-b99aa94b87ca\") " pod="openshift-etcd-operator/etcd-operator-b45778765-b4hpk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.562549 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.594873 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/51627ad8-bcac-4c0e-934d-b99aa94b87ca-etcd-service-ca\") pod \"etcd-operator-b45778765-b4hpk\" (UID: \"51627ad8-bcac-4c0e-934d-b99aa94b87ca\") " pod="openshift-etcd-operator/etcd-operator-b45778765-b4hpk" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.596858 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.598366 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.620088 4975 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.632595 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/bc715eca-0758-4aa8-90e6-59b28717a44c-metrics-tls\") pod \"dns-default-9hgnx\" (UID: \"bc715eca-0758-4aa8-90e6-59b28717a44c\") " pod="openshift-dns/dns-default-9hgnx" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.640053 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.651041 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bc715eca-0758-4aa8-90e6-59b28717a44c-config-volume\") pod \"dns-default-9hgnx\" (UID: \"bc715eca-0758-4aa8-90e6-59b28717a44c\") " pod="openshift-dns/dns-default-9hgnx" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.661148 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.679853 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.704843 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.720628 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.736932 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/ab6d30fc-43fe-46af-8d7c-e67f3b3811d2-stats-auth\") pod \"router-default-5444994796-6hw84\" (UID: \"ab6d30fc-43fe-46af-8d7c-e67f3b3811d2\") " pod="openshift-ingress/router-default-5444994796-6hw84" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.738979 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ab6d30fc-43fe-46af-8d7c-e67f3b3811d2-metrics-certs\") pod \"router-default-5444994796-6hw84\" (UID: \"ab6d30fc-43fe-46af-8d7c-e67f3b3811d2\") " pod="openshift-ingress/router-default-5444994796-6hw84" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.739386 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.759655 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.779751 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.799379 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.810541 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/ab6d30fc-43fe-46af-8d7c-e67f3b3811d2-default-certificate\") pod \"router-default-5444994796-6hw84\" (UID: \"ab6d30fc-43fe-46af-8d7c-e67f3b3811d2\") " 
pod="openshift-ingress/router-default-5444994796-6hw84" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.819531 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-2rcjc"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.821473 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: W0126 00:09:18.832053 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod64ca38fe_db66_4303_a97e_77b7fa4c6214.slice/crio-0ee1f21663d073e59fa38233abd9f81904e620d0532e9911036769103bdc58ee WatchSource:0}: Error finding container 0ee1f21663d073e59fa38233abd9f81904e620d0532e9911036769103bdc58ee: Status 404 returned error can't find the container with id 0ee1f21663d073e59fa38233abd9f81904e620d0532e9911036769103bdc58ee Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.839178 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.845918 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ab6d30fc-43fe-46af-8d7c-e67f3b3811d2-service-ca-bundle\") pod \"router-default-5444994796-6hw84\" (UID: \"ab6d30fc-43fe-46af-8d7c-e67f3b3811d2\") " pod="openshift-ingress/router-default-5444994796-6hw84" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.873994 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-xh2mk"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.874099 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-2rcjc" event={"ID":"64ca38fe-db66-4303-a97e-77b7fa4c6214","Type":"ContainerStarted","Data":"0ee1f21663d073e59fa38233abd9f81904e620d0532e9911036769103bdc58ee"} Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.880831 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67hdw\" (UniqueName: \"kubernetes.io/projected/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-kube-api-access-67hdw\") pod \"oauth-openshift-558db77b4-xrnhz\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.893209 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-vjg2l"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.903467 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xlnxj\" (UniqueName: \"kubernetes.io/projected/efda37e1-c7f8-4e41-a9a4-e5191fe797ab-kube-api-access-xlnxj\") pod \"apiserver-76f77b778f-xmsj9\" (UID: \"efda37e1-c7f8-4e41-a9a4-e5191fe797ab\") " pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.908886 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.918692 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2smpc\" (UniqueName: \"kubernetes.io/projected/c1bf1b13-966b-4a74-935c-47af817d7777-kube-api-access-2smpc\") pod \"downloads-7954f5f757-cbkrr\" (UID: \"c1bf1b13-966b-4a74-935c-47af817d7777\") " pod="openshift-console/downloads-7954f5f757-cbkrr" Jan 26 00:09:18 crc kubenswrapper[4975]: W0126 00:09:18.919699 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc4376008_2a48_499e_a548_0b5f233e3af7.slice/crio-64b837cb22e34ca6061289ad685650cfa98a4833ec1933fe9d26cca57fc6fdfd WatchSource:0}: Error finding container 64b837cb22e34ca6061289ad685650cfa98a4833ec1933fe9d26cca57fc6fdfd: Status 404 returned error can't find the container with id 64b837cb22e34ca6061289ad685650cfa98a4833ec1933fe9d26cca57fc6fdfd Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.921134 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.939946 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.947820 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.959688 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.962333 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8r7mh"] Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.968160 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/c08c3cbe-5ec9-4aaa-a74a-bccfb6a427fa-metrics-tls\") pod \"dns-operator-744455d44c-xnxsr\" (UID: \"c08c3cbe-5ec9-4aaa-a74a-bccfb6a427fa\") " pod="openshift-dns-operator/dns-operator-744455d44c-xnxsr" Jan 26 00:09:18 crc kubenswrapper[4975]: I0126 00:09:18.979602 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.014488 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8smfs\" (UniqueName: \"kubernetes.io/projected/030235cf-11d2-431b-92f7-c46704c420e8-kube-api-access-8smfs\") pod \"authentication-operator-69f744f599-b8p7w\" (UID: \"030235cf-11d2-431b-92f7-c46704c420e8\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-b8p7w" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.062651 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sf9vp\" (UniqueName: \"kubernetes.io/projected/e127f059-17d0-4da8-899b-94674c0ff01f-kube-api-access-sf9vp\") pod \"image-pruner-29489760-c9d48\" (UID: \"e127f059-17d0-4da8-899b-94674c0ff01f\") " pod="openshift-image-registry/image-pruner-29489760-c9d48" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.065898 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-pruner-29489760-c9d48" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.076996 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptt94\" (UniqueName: \"kubernetes.io/projected/7aada3dd-f7ae-4129-b168-366122ad0ef1-kube-api-access-ptt94\") pod \"machine-approver-56656f9798-q6z7v\" (UID: \"7aada3dd-f7ae-4129-b168-366122ad0ef1\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q6z7v" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.082977 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.086784 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-cbkrr" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.101349 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.121133 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.122955 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-wrh4k"] Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.124580 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-krk6p"] Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.132022 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zdp7w"] Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.143945 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.163983 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp"] Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.167032 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gqm5h"] Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.180852 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqlth\" (UniqueName: \"kubernetes.io/projected/41509c7e-a96a-44ce-a24e-dbd2f80386b0-kube-api-access-gqlth\") pod \"apiserver-7bbb656c7d-g7tn2\" (UID: \"41509c7e-a96a-44ce-a24e-dbd2f80386b0\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.182140 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.195433 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-xmsj9"] Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.201352 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-b8p7w" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.205946 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.224995 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xrnhz"] Jan 26 00:09:19 crc kubenswrapper[4975]: W0126 00:09:19.225084 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod557733ab_6df8_42b0_893e_a10f05e34f2d.slice/crio-84910189e1e1311ab6731b789daae59be20e5e03c466e590cded9013b05bf524 WatchSource:0}: Error finding container 84910189e1e1311ab6731b789daae59be20e5e03c466e590cded9013b05bf524: Status 404 returned error can't find the container with id 84910189e1e1311ab6731b789daae59be20e5e03c466e590cded9013b05bf524 Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.227151 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.239156 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.240315 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b5e1ff61-8c29-474a-bf64-f9a09b1d166e-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-65wb9\" (UID: \"b5e1ff61-8c29-474a-bf64-f9a09b1d166e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-65wb9" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.245836 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b5e1ff61-8c29-474a-bf64-f9a09b1d166e-config\") pod \"kube-controller-manager-operator-78b949d7b-65wb9\" (UID: \"b5e1ff61-8c29-474a-bf64-f9a09b1d166e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-65wb9" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.281525 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.307072 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.307406 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.328351 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.343818 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-pruner-29489760-c9d48"] Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.344354 4975 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.359080 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.372639 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q6z7v" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.377829 4975 request.go:700] Waited for 1.019469627s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-storage-version-migrator-operator/secrets?fieldSelector=metadata.name%3Dserving-cert&limit=500&resourceVersion=0 Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.379825 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.380366 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.399238 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.418492 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.437182 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-cbkrr"] Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.438817 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.454822 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/254f6b75-68d4-421f-81a4-1d78e32db94b-srv-cert\") pod \"olm-operator-6b444d44fb-zdfmk\" (UID: \"254f6b75-68d4-421f-81a4-1d78e32db94b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zdfmk" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.463319 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.472830 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/254f6b75-68d4-421f-81a4-1d78e32db94b-profile-collector-cert\") pod \"olm-operator-6b444d44fb-zdfmk\" (UID: \"254f6b75-68d4-421f-81a4-1d78e32db94b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zdfmk" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.478776 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.490121 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-b8p7w"] Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.500199 4975 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 26 00:09:19 crc kubenswrapper[4975]: W0126 00:09:19.506858 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod030235cf_11d2_431b_92f7_c46704c420e8.slice/crio-8c00a08ddb96803891691b86a7cc7d1361b4f4a9ba480b33ca725b0971946835 WatchSource:0}: Error finding container 8c00a08ddb96803891691b86a7cc7d1361b4f4a9ba480b33ca725b0971946835: Status 404 returned error can't find the container with id 8c00a08ddb96803891691b86a7cc7d1361b4f4a9ba480b33ca725b0971946835 Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.519750 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 26 00:09:19 crc kubenswrapper[4975]: E0126 00:09:19.524148 4975 secret.go:188] Couldn't get secret openshift-machine-config-operator/node-bootstrapper-token: failed to sync secret cache: timed out waiting for the condition Jan 26 00:09:19 crc kubenswrapper[4975]: E0126 00:09:19.524226 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ea760b21-fb4c-4e8a-9d07-094885818cac-node-bootstrap-token podName:ea760b21-fb4c-4e8a-9d07-094885818cac nodeName:}" failed. No retries permitted until 2026-01-26 00:09:20.024206809 +0000 UTC m=+144.145412303 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "node-bootstrap-token" (UniqueName: "kubernetes.io/secret/ea760b21-fb4c-4e8a-9d07-094885818cac-node-bootstrap-token") pod "machine-config-server-gbxpn" (UID: "ea760b21-fb4c-4e8a-9d07-094885818cac") : failed to sync secret cache: timed out waiting for the condition Jan 26 00:09:19 crc kubenswrapper[4975]: E0126 00:09:19.531096 4975 secret.go:188] Couldn't get secret openshift-machine-config-operator/machine-config-server-tls: failed to sync secret cache: timed out waiting for the condition Jan 26 00:09:19 crc kubenswrapper[4975]: E0126 00:09:19.531214 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ea760b21-fb4c-4e8a-9d07-094885818cac-certs podName:ea760b21-fb4c-4e8a-9d07-094885818cac nodeName:}" failed. No retries permitted until 2026-01-26 00:09:20.03119042 +0000 UTC m=+144.152395914 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "certs" (UniqueName: "kubernetes.io/secret/ea760b21-fb4c-4e8a-9d07-094885818cac-certs") pod "machine-config-server-gbxpn" (UID: "ea760b21-fb4c-4e8a-9d07-094885818cac") : failed to sync secret cache: timed out waiting for the condition Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.539964 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.559993 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.580350 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.612196 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.619670 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.639676 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.662180 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.679248 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.712434 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.721022 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.739332 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2"] Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.739489 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.759930 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.780087 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.799163 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.819907 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.840152 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.859270 4975 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ingress-canary"/"canary-serving-cert" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.879849 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.884898 4975 generic.go:334] "Generic (PLEG): container finished" podID="efda37e1-c7f8-4e41-a9a4-e5191fe797ab" containerID="26978569204aa6d9b6ad35312156183a1b0c0ea68a2f8f409fbcea1f3cb34769" exitCode=0 Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.884966 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" event={"ID":"efda37e1-c7f8-4e41-a9a4-e5191fe797ab","Type":"ContainerDied","Data":"26978569204aa6d9b6ad35312156183a1b0c0ea68a2f8f409fbcea1f3cb34769"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.884996 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" event={"ID":"efda37e1-c7f8-4e41-a9a4-e5191fe797ab","Type":"ContainerStarted","Data":"e8e11edbc5836453dd53f97510a2540d1e1c813db591d4cd730f257780f2935b"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.890937 4975 generic.go:334] "Generic (PLEG): container finished" podID="c4376008-2a48-499e-a548-0b5f233e3af7" containerID="12f37afb2fc9712b9f1c5b3977e14ca03b1a6845dd03c2bd836e7629e8f49d70" exitCode=0 Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.890998 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vjg2l" event={"ID":"c4376008-2a48-499e-a548-0b5f233e3af7","Type":"ContainerDied","Data":"12f37afb2fc9712b9f1c5b3977e14ca03b1a6845dd03c2bd836e7629e8f49d70"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.891020 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vjg2l" event={"ID":"c4376008-2a48-499e-a548-0b5f233e3af7","Type":"ContainerStarted","Data":"64b837cb22e34ca6061289ad685650cfa98a4833ec1933fe9d26cca57fc6fdfd"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.895309 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp" event={"ID":"557733ab-6df8-42b0-893e-a10f05e34f2d","Type":"ContainerStarted","Data":"96175440d2d12ca090bf40787b800f4ed116ac14002cd2a76f505e52ee45b81f"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.895342 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp" event={"ID":"557733ab-6df8-42b0-893e-a10f05e34f2d","Type":"ContainerStarted","Data":"84910189e1e1311ab6731b789daae59be20e5e03c466e590cded9013b05bf524"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.897159 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.897245 4975 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-xx2cp container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.897280 4975 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp" podUID="557733ab-6df8-42b0-893e-a10f05e34f2d" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.899212 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.900159 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-b8p7w" event={"ID":"030235cf-11d2-431b-92f7-c46704c420e8","Type":"ContainerStarted","Data":"0dab723402a5ebb32f2dd9f711776e4dc4fe76748141bd47261aa0e1ed52d5e1"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.900191 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-b8p7w" event={"ID":"030235cf-11d2-431b-92f7-c46704c420e8","Type":"ContainerStarted","Data":"8c00a08ddb96803891691b86a7cc7d1361b4f4a9ba480b33ca725b0971946835"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.907675 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-xh2mk" event={"ID":"be0d8f61-b7b4-48cf-a6cb-6780df7d99d6","Type":"ContainerStarted","Data":"e0c4c515131fb556afbd6f85ad2f65a0eafc975a0ed2b7277139f99b2c42d149"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.907715 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-xh2mk" event={"ID":"be0d8f61-b7b4-48cf-a6cb-6780df7d99d6","Type":"ContainerStarted","Data":"f6e55e3795ccd3d92079bf741db60dc9f0bf6a39bad12f6de0851a959c0aba07"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.907725 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-xh2mk" event={"ID":"be0d8f61-b7b4-48cf-a6cb-6780df7d99d6","Type":"ContainerStarted","Data":"7b4148c3f94bdf3a1acf40997f525d8395ba71f6fe2914b8da995070e807bc7d"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.913610 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" event={"ID":"19f40110-25a0-41cb-b740-67d93659b7dc","Type":"ContainerStarted","Data":"15f1ded0751ca1f526d713152d6f74af835e5da8ba7bab0ad6e01eb3b60d2594"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.913653 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" event={"ID":"19f40110-25a0-41cb-b740-67d93659b7dc","Type":"ContainerStarted","Data":"5654e1e369de0a21f085ebb73818b5497d83f4bc745246fdddaa9a01e89be36f"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.915026 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.916184 4975 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-krk6p container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body= Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.916221 4975 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" podUID="19f40110-25a0-41cb-b740-67d93659b7dc" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.918583 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.923548 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-2rcjc" event={"ID":"64ca38fe-db66-4303-a97e-77b7fa4c6214","Type":"ContainerStarted","Data":"fd921ba738da92527869c21c4197756358311ecef2fdb09387e3d2157cc5f7bc"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.924418 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-2rcjc" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.927202 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-wrh4k" event={"ID":"fe206c78-fea9-4b0e-b236-3e4bd73f1d13","Type":"ContainerStarted","Data":"4b6920075d11e8ab5ccc07f6d87e707982cabb781e7f91c526a49de2dd61b73d"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.927246 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-wrh4k" event={"ID":"fe206c78-fea9-4b0e-b236-3e4bd73f1d13","Type":"ContainerStarted","Data":"477cfb78ae1efc6d37f9cf751cce06c7fd3ffecc17863e63b79071c9c0504f54"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.929265 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zdp7w" event={"ID":"9424e0a3-86c1-49b3-a3b6-f599caf06e8b","Type":"ContainerStarted","Data":"9e128fbe690974232132125f974ce92b4e9eb22806f7df340ea559ce24958c79"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.929377 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zdp7w" event={"ID":"9424e0a3-86c1-49b3-a3b6-f599caf06e8b","Type":"ContainerStarted","Data":"94c1b4fafc62669b69755ad946bad98eb879ca4bb130168df828f88c8bf2101f"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.937092 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" event={"ID":"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9","Type":"ContainerStarted","Data":"97672d42752f3182147d0c1b3a8ae214e10381b0a24ae1c57606221e18b05f39"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.937146 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" event={"ID":"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9","Type":"ContainerStarted","Data":"5e930fad3bf4b0782e6d15c46bec0dfa8366bafb54ffc7d22be418c815d3759a"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.937608 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.938915 4975 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-xrnhz container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.14:6443/healthz\": dial tcp 10.217.0.14:6443: connect: connection refused" 
start-of-body= Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.939035 4975 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" podUID="73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.14:6443/healthz\": dial tcp 10.217.0.14:6443: connect: connection refused" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.939697 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" event={"ID":"41509c7e-a96a-44ce-a24e-dbd2f80386b0","Type":"ContainerStarted","Data":"dac1700f771ba38bb559db53ce825c2b1e4578703fb88a9440e06b077f549213"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.941727 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q6z7v" event={"ID":"7aada3dd-f7ae-4129-b168-366122ad0ef1","Type":"ContainerStarted","Data":"9ba481f4abbfdc1fc2434d32920b0026e94bf54c235b3e4c58884eaf47cad712"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.942256 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q6z7v" event={"ID":"7aada3dd-f7ae-4129-b168-366122ad0ef1","Type":"ContainerStarted","Data":"8d2cbaf3b269772b24736da0d6ebdc7e4fdf974e78d7468a96872ab380d785eb"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.942838 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.950471 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gqm5h" event={"ID":"48682975-6b38-48ad-abbd-b1c89ac4bbd9","Type":"ContainerStarted","Data":"ba7a3a4eb743ca4869eb35b91388ea81e398b8092564fe68fa4c46d51806af35"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.950510 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gqm5h" event={"ID":"48682975-6b38-48ad-abbd-b1c89ac4bbd9","Type":"ContainerStarted","Data":"512d803c8450c3b00d8ad00c6e153b7b8e427d6239bdbdb73fc8f5bf3a8d0566"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.956712 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-cbkrr" event={"ID":"c1bf1b13-966b-4a74-935c-47af817d7777","Type":"ContainerStarted","Data":"7342ba0621a795142529058f67ea9198969cfce487c8820737a7a88c67fe11b9"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.956768 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-cbkrr" event={"ID":"c1bf1b13-966b-4a74-935c-47af817d7777","Type":"ContainerStarted","Data":"e0931adf9e0576175e5da3f521d1d8c99333491221869a8e74ca6f2682b6fb5c"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.958196 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-cbkrr" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.959481 4975 patch_prober.go:28] interesting pod/downloads-7954f5f757-cbkrr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.959527 4975 prober.go:107] "Probe 
failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-cbkrr" podUID="c1bf1b13-966b-4a74-935c-47af817d7777" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.959956 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-2rcjc" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.960189 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.967636 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29489760-c9d48" event={"ID":"e127f059-17d0-4da8-899b-94674c0ff01f","Type":"ContainerStarted","Data":"a80ad68f393a6b165d9de3d05159ee29e83862fa018f8e2c1588497dca848d51"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.967694 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29489760-c9d48" event={"ID":"e127f059-17d0-4da8-899b-94674c0ff01f","Type":"ContainerStarted","Data":"35412c04d0f6db08792428eca5e4d88dbe2049be446130a62bade1a54f238c87"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.973079 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8r7mh" event={"ID":"d50d7251-c487-444f-ae08-3713d8ea5ce3","Type":"ContainerStarted","Data":"398f0a2be0e59f5c0b215901e78ca322359b34c567c6043e9e5af5bfc5be9527"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.973118 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8r7mh" event={"ID":"d50d7251-c487-444f-ae08-3713d8ea5ce3","Type":"ContainerStarted","Data":"1b2c04be60eb58608f9281e3363a270484020d1ca0ee82044a1aa214754e922d"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.973132 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8r7mh" event={"ID":"d50d7251-c487-444f-ae08-3713d8ea5ce3","Type":"ContainerStarted","Data":"f9e5106e241f22c5910297e4b44818a1e660471e1c351ee12dde026a2ed79070"} Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.979704 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 26 00:09:19 crc kubenswrapper[4975]: I0126 00:09:19.999303 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.019175 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.043057 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.058975 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.080626 4975 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.103818 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.112550 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ea760b21-fb4c-4e8a-9d07-094885818cac-certs\") pod \"machine-config-server-gbxpn\" (UID: \"ea760b21-fb4c-4e8a-9d07-094885818cac\") " pod="openshift-machine-config-operator/machine-config-server-gbxpn" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.112618 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ea760b21-fb4c-4e8a-9d07-094885818cac-node-bootstrap-token\") pod \"machine-config-server-gbxpn\" (UID: \"ea760b21-fb4c-4e8a-9d07-094885818cac\") " pod="openshift-machine-config-operator/machine-config-server-gbxpn" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.118801 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.126652 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ea760b21-fb4c-4e8a-9d07-094885818cac-certs\") pod \"machine-config-server-gbxpn\" (UID: \"ea760b21-fb4c-4e8a-9d07-094885818cac\") " pod="openshift-machine-config-operator/machine-config-server-gbxpn" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.128194 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ea760b21-fb4c-4e8a-9d07-094885818cac-node-bootstrap-token\") pod \"machine-config-server-gbxpn\" (UID: \"ea760b21-fb4c-4e8a-9d07-094885818cac\") " pod="openshift-machine-config-operator/machine-config-server-gbxpn" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.140610 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.158802 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.180314 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.202138 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.219881 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.241851 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.263796 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.287835 
4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.302618 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.339914 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.361620 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.379539 4975 request.go:700] Waited for 1.983448747s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-service-ca/secrets?fieldSelector=metadata.name%3Dservice-ca-dockercfg-pn86c&limit=500&resourceVersion=0 Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.381216 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.398822 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.419109 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.439667 4975 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.483953 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5w2s7\" (UniqueName: \"kubernetes.io/projected/bc715eca-0758-4aa8-90e6-59b28717a44c-kube-api-access-5w2s7\") pod \"dns-default-9hgnx\" (UID: \"bc715eca-0758-4aa8-90e6-59b28717a44c\") " pod="openshift-dns/dns-default-9hgnx" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.491195 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-9hgnx" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.498283 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.522001 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.540814 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gj7kf\" (UniqueName: \"kubernetes.io/projected/51627ad8-bcac-4c0e-934d-b99aa94b87ca-kube-api-access-gj7kf\") pod \"etcd-operator-b45778765-b4hpk\" (UID: \"51627ad8-bcac-4c0e-934d-b99aa94b87ca\") " pod="openshift-etcd-operator/etcd-operator-b45778765-b4hpk" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.576882 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnzrn\" (UniqueName: \"kubernetes.io/projected/42a9c706-06f8-489f-8b1a-769b1101a2d7-kube-api-access-qnzrn\") pod \"openshift-controller-manager-operator-756b6f6bc6-m8qff\" (UID: \"42a9c706-06f8-489f-8b1a-769b1101a2d7\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m8qff" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.583596 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2z8l\" (UniqueName: \"kubernetes.io/projected/ab6d30fc-43fe-46af-8d7c-e67f3b3811d2-kube-api-access-c2z8l\") pod \"router-default-5444994796-6hw84\" (UID: \"ab6d30fc-43fe-46af-8d7c-e67f3b3811d2\") " pod="openshift-ingress/router-default-5444994796-6hw84" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.619347 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79xpr\" (UniqueName: \"kubernetes.io/projected/c08c3cbe-5ec9-4aaa-a74a-bccfb6a427fa-kube-api-access-79xpr\") pod \"dns-operator-744455d44c-xnxsr\" (UID: \"c08c3cbe-5ec9-4aaa-a74a-bccfb6a427fa\") " pod="openshift-dns-operator/dns-operator-744455d44c-xnxsr" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.632878 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwnwm\" (UniqueName: \"kubernetes.io/projected/254f6b75-68d4-421f-81a4-1d78e32db94b-kube-api-access-zwnwm\") pod \"olm-operator-6b444d44fb-zdfmk\" (UID: \"254f6b75-68d4-421f-81a4-1d78e32db94b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zdfmk" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.659268 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znvql\" (UniqueName: \"kubernetes.io/projected/ea760b21-fb4c-4e8a-9d07-094885818cac-kube-api-access-znvql\") pod \"machine-config-server-gbxpn\" (UID: \"ea760b21-fb4c-4e8a-9d07-094885818cac\") " pod="openshift-machine-config-operator/machine-config-server-gbxpn" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.674523 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b5e1ff61-8c29-474a-bf64-f9a09b1d166e-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-65wb9\" (UID: \"b5e1ff61-8c29-474a-bf64-f9a09b1d166e\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-65wb9" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.754443 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zdfmk" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.754455 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/1ed800e9-7ccf-41d7-802e-3fb96798ed9d-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-l9vhw\" (UID: \"1ed800e9-7ccf-41d7-802e-3fb96798ed9d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-l9vhw" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.754574 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkg9s\" (UniqueName: \"kubernetes.io/projected/2b324e48-dc6f-47db-acb5-9b590fe869cb-kube-api-access-bkg9s\") pod \"packageserver-d55dfcdfc-ts2pk\" (UID: \"2b324e48-dc6f-47db-acb5-9b590fe869cb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ts2pk" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.754985 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3e916258-6c75-413f-9d82-2ef568aa3647-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-fpdjm\" (UID: \"3e916258-6c75-413f-9d82-2ef568aa3647\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fpdjm" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.755020 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rc2nr\" (UniqueName: \"kubernetes.io/projected/c4e5fa00-3bad-48f8-930e-a81d2ffb696b-kube-api-access-rc2nr\") pod \"migrator-59844c95c7-8dkw2\" (UID: \"c4e5fa00-3bad-48f8-930e-a81d2ffb696b\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8dkw2" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.755065 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e553fc00-cc5f-41e7-aeae-99be7ec861d4-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-cctnf\" (UID: \"e553fc00-cc5f-41e7-aeae-99be7ec861d4\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cctnf" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.755115 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3c669669-d4d5-42bf-969b-02661b7cf7a2-profile-collector-cert\") pod \"catalog-operator-68c6474976-pdn2l\" (UID: \"3c669669-d4d5-42bf-969b-02661b7cf7a2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pdn2l" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.755343 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-registry-certificates\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.755370 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" 
(UniqueName: \"kubernetes.io/configmap/3d936082-c3b2-4117-9952-638b630b653a-trusted-ca\") pod \"ingress-operator-5b745b69d9-gqttk\" (UID: \"3d936082-c3b2-4117-9952-638b630b653a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gqttk" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.755410 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/86d89d82-a826-4586-b7fb-66866bf100e9-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9jj5\" (UID: \"86d89d82-a826-4586-b7fb-66866bf100e9\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9jj5" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.755435 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6f5a24d3-8dae-4970-8842-21bddef2373f-secret-volume\") pod \"collect-profiles-29489760-6nbzp\" (UID: \"6f5a24d3-8dae-4970-8842-21bddef2373f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489760-6nbzp" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.755495 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lc5k\" (UniqueName: \"kubernetes.io/projected/3c669669-d4d5-42bf-969b-02661b7cf7a2-kube-api-access-8lc5k\") pod \"catalog-operator-68c6474976-pdn2l\" (UID: \"3c669669-d4d5-42bf-969b-02661b7cf7a2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pdn2l" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.755691 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cc3f2a80-2278-4d49-9bfc-6a5cb026394a-serving-cert\") pod \"service-ca-operator-777779d784-vfc7w\" (UID: \"cc3f2a80-2278-4d49-9bfc-6a5cb026394a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vfc7w" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.755749 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pr7mw\" (UniqueName: \"kubernetes.io/projected/2ead6744-6221-4da7-b612-c1e59bab87e5-kube-api-access-pr7mw\") pod \"multus-admission-controller-857f4d67dd-8mddw\" (UID: \"2ead6744-6221-4da7-b612-c1e59bab87e5\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-8mddw" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.755795 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2b324e48-dc6f-47db-acb5-9b590fe869cb-webhook-cert\") pod \"packageserver-d55dfcdfc-ts2pk\" (UID: \"2b324e48-dc6f-47db-acb5-9b590fe869cb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ts2pk" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.755845 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc3f2a80-2278-4d49-9bfc-6a5cb026394a-config\") pod \"service-ca-operator-777779d784-vfc7w\" (UID: \"cc3f2a80-2278-4d49-9bfc-6a5cb026394a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vfc7w" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.755891 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-t75h8\" (UniqueName: \"kubernetes.io/projected/fcf19955-9a00-4a50-8ce1-bd7098c45eec-kube-api-access-t75h8\") pod \"marketplace-operator-79b997595-8xrbd\" (UID: \"fcf19955-9a00-4a50-8ce1-bd7098c45eec\") " pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.756073 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5z72s\" (UniqueName: \"kubernetes.io/projected/fcabeb27-81a2-4c9c-9f7f-f6883e3f4724-kube-api-access-5z72s\") pod \"ingress-canary-j6df8\" (UID: \"fcabeb27-81a2-4c9c-9f7f-f6883e3f4724\") " pod="openshift-ingress-canary/ingress-canary-j6df8" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.756100 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/72455538-0f4e-4b90-a4e8-3c990f4b89df-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-8c46q\" (UID: \"72455538-0f4e-4b90-a4e8-3c990f4b89df\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8c46q" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.756174 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72455538-0f4e-4b90-a4e8-3c990f4b89df-config\") pod \"kube-apiserver-operator-766d6c64bb-8c46q\" (UID: \"72455538-0f4e-4b90-a4e8-3c990f4b89df\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8c46q" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.756199 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-bound-sa-token\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.756393 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eeb5c859-42f1-4c67-a8a2-18635447eb4d-auth-proxy-config\") pod \"machine-config-operator-74547568cd-zjr5q\" (UID: \"eeb5c859-42f1-4c67-a8a2-18635447eb4d\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjr5q" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.756426 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a72f0b8-41a1-46ab-8ad4-d0448b255e6d-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-d8v59\" (UID: \"6a72f0b8-41a1-46ab-8ad4-d0448b255e6d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-d8v59" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.756457 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.756481 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-5gmwj\" (UniqueName: \"kubernetes.io/projected/cc3f2a80-2278-4d49-9bfc-6a5cb026394a-kube-api-access-5gmwj\") pod \"service-ca-operator-777779d784-vfc7w\" (UID: \"cc3f2a80-2278-4d49-9bfc-6a5cb026394a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vfc7w" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.756525 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-ca-trust-extracted\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.756835 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-registry-tls\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.756871 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a72f0b8-41a1-46ab-8ad4-d0448b255e6d-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-d8v59\" (UID: \"6a72f0b8-41a1-46ab-8ad4-d0448b255e6d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-d8v59" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.756888 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3c669669-d4d5-42bf-969b-02661b7cf7a2-srv-cert\") pod \"catalog-operator-68c6474976-pdn2l\" (UID: \"3c669669-d4d5-42bf-969b-02661b7cf7a2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pdn2l" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.756927 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhrs4\" (UniqueName: \"kubernetes.io/projected/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-kube-api-access-fhrs4\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.756948 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vlqg2\" (UniqueName: \"kubernetes.io/projected/1ed800e9-7ccf-41d7-802e-3fb96798ed9d-kube-api-access-vlqg2\") pod \"package-server-manager-789f6589d5-l9vhw\" (UID: \"1ed800e9-7ccf-41d7-802e-3fb96798ed9d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-l9vhw" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.756969 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/eeb5c859-42f1-4c67-a8a2-18635447eb4d-images\") pod \"machine-config-operator-74547568cd-zjr5q\" (UID: \"eeb5c859-42f1-4c67-a8a2-18635447eb4d\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjr5q" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.757124 4975 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2ead6744-6221-4da7-b612-c1e59bab87e5-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-8mddw\" (UID: \"2ead6744-6221-4da7-b612-c1e59bab87e5\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-8mddw" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.757172 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsdc7\" (UniqueName: \"kubernetes.io/projected/e553fc00-cc5f-41e7-aeae-99be7ec861d4-kube-api-access-xsdc7\") pod \"control-plane-machine-set-operator-78cbb6b69f-cctnf\" (UID: \"e553fc00-cc5f-41e7-aeae-99be7ec861d4\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cctnf" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.757205 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-installation-pull-secrets\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.757227 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3e916258-6c75-413f-9d82-2ef568aa3647-proxy-tls\") pod \"machine-config-controller-84d6567774-fpdjm\" (UID: \"3e916258-6c75-413f-9d82-2ef568aa3647\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fpdjm" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.757271 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72455538-0f4e-4b90-a4e8-3c990f4b89df-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-8c46q\" (UID: \"72455538-0f4e-4b90-a4e8-3c990f4b89df\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8c46q" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.757517 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2twn\" (UniqueName: \"kubernetes.io/projected/eeb5c859-42f1-4c67-a8a2-18635447eb4d-kube-api-access-c2twn\") pod \"machine-config-operator-74547568cd-zjr5q\" (UID: \"eeb5c859-42f1-4c67-a8a2-18635447eb4d\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjr5q" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.757546 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fcabeb27-81a2-4c9c-9f7f-f6883e3f4724-cert\") pod \"ingress-canary-j6df8\" (UID: \"fcabeb27-81a2-4c9c-9f7f-f6883e3f4724\") " pod="openshift-ingress-canary/ingress-canary-j6df8" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.757573 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/2b324e48-dc6f-47db-acb5-9b590fe869cb-tmpfs\") pod \"packageserver-d55dfcdfc-ts2pk\" (UID: \"2b324e48-dc6f-47db-acb5-9b590fe869cb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ts2pk" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.757596 4975 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3d936082-c3b2-4117-9952-638b630b653a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-gqttk\" (UID: \"3d936082-c3b2-4117-9952-638b630b653a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gqttk" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.757616 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bqbpq\" (UniqueName: \"kubernetes.io/projected/6f5a24d3-8dae-4970-8842-21bddef2373f-kube-api-access-bqbpq\") pod \"collect-profiles-29489760-6nbzp\" (UID: \"6f5a24d3-8dae-4970-8842-21bddef2373f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489760-6nbzp" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.757815 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6a72f0b8-41a1-46ab-8ad4-d0448b255e6d-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-d8v59\" (UID: \"6a72f0b8-41a1-46ab-8ad4-d0448b255e6d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-d8v59" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.757841 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fcf19955-9a00-4a50-8ce1-bd7098c45eec-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8xrbd\" (UID: \"fcf19955-9a00-4a50-8ce1-bd7098c45eec\") " pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.757888 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-trusted-ca\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.757907 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3d936082-c3b2-4117-9952-638b630b653a-metrics-tls\") pod \"ingress-operator-5b745b69d9-gqttk\" (UID: \"3d936082-c3b2-4117-9952-638b630b653a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gqttk" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.757940 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6f5a24d3-8dae-4970-8842-21bddef2373f-config-volume\") pod \"collect-profiles-29489760-6nbzp\" (UID: \"6f5a24d3-8dae-4970-8842-21bddef2373f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489760-6nbzp" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.758071 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86d89d82-a826-4586-b7fb-66866bf100e9-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9jj5\" (UID: \"86d89d82-a826-4586-b7fb-66866bf100e9\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9jj5" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 
00:09:20.758103 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fcf19955-9a00-4a50-8ce1-bd7098c45eec-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8xrbd\" (UID: \"fcf19955-9a00-4a50-8ce1-bd7098c45eec\") " pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.758161 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vc49c\" (UniqueName: \"kubernetes.io/projected/3d936082-c3b2-4117-9952-638b630b653a-kube-api-access-vc49c\") pod \"ingress-operator-5b745b69d9-gqttk\" (UID: \"3d936082-c3b2-4117-9952-638b630b653a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gqttk" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.758180 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2b324e48-dc6f-47db-acb5-9b590fe869cb-apiservice-cert\") pod \"packageserver-d55dfcdfc-ts2pk\" (UID: \"2b324e48-dc6f-47db-acb5-9b590fe869cb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ts2pk" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.758199 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pjt2z\" (UniqueName: \"kubernetes.io/projected/3e916258-6c75-413f-9d82-2ef568aa3647-kube-api-access-pjt2z\") pod \"machine-config-controller-84d6567774-fpdjm\" (UID: \"3e916258-6c75-413f-9d82-2ef568aa3647\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fpdjm" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.758325 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vs4lf\" (UniqueName: \"kubernetes.io/projected/86d89d82-a826-4586-b7fb-66866bf100e9-kube-api-access-vs4lf\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9jj5\" (UID: \"86d89d82-a826-4586-b7fb-66866bf100e9\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9jj5" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.758358 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/eeb5c859-42f1-4c67-a8a2-18635447eb4d-proxy-tls\") pod \"machine-config-operator-74547568cd-zjr5q\" (UID: \"eeb5c859-42f1-4c67-a8a2-18635447eb4d\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjr5q" Jan 26 00:09:20 crc kubenswrapper[4975]: E0126 00:09:20.778416 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:21.278396425 +0000 UTC m=+145.399601909 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.782639 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m8qff" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.783645 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-b4hpk" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.801483 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-6hw84" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.814544 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-xnxsr" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.859454 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.859685 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eeb5c859-42f1-4c67-a8a2-18635447eb4d-auth-proxy-config\") pod \"machine-config-operator-74547568cd-zjr5q\" (UID: \"eeb5c859-42f1-4c67-a8a2-18635447eb4d\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjr5q" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.859708 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a72f0b8-41a1-46ab-8ad4-d0448b255e6d-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-d8v59\" (UID: \"6a72f0b8-41a1-46ab-8ad4-d0448b255e6d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-d8v59" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.859776 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gmwj\" (UniqueName: \"kubernetes.io/projected/cc3f2a80-2278-4d49-9bfc-6a5cb026394a-kube-api-access-5gmwj\") pod \"service-ca-operator-777779d784-vfc7w\" (UID: \"cc3f2a80-2278-4d49-9bfc-6a5cb026394a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vfc7w" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.859798 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/bb028166-02f4-416d-a53e-1e96d3ad062c-csi-data-dir\") pod \"csi-hostpathplugin-dsbk6\" (UID: \"bb028166-02f4-416d-a53e-1e96d3ad062c\") " pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.859818 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-ca-trust-extracted\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.859835 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-registry-tls\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.859859 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a72f0b8-41a1-46ab-8ad4-d0448b255e6d-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-d8v59\" (UID: \"6a72f0b8-41a1-46ab-8ad4-d0448b255e6d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-d8v59" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.859877 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3c669669-d4d5-42bf-969b-02661b7cf7a2-srv-cert\") pod \"catalog-operator-68c6474976-pdn2l\" (UID: \"3c669669-d4d5-42bf-969b-02661b7cf7a2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pdn2l" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.859983 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhrs4\" (UniqueName: \"kubernetes.io/projected/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-kube-api-access-fhrs4\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860004 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vlqg2\" (UniqueName: \"kubernetes.io/projected/1ed800e9-7ccf-41d7-802e-3fb96798ed9d-kube-api-access-vlqg2\") pod \"package-server-manager-789f6589d5-l9vhw\" (UID: \"1ed800e9-7ccf-41d7-802e-3fb96798ed9d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-l9vhw" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860023 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/cfe2bc53-bda8-4d4e-8da5-521f3821826c-signing-cabundle\") pod \"service-ca-9c57cc56f-tb9s5\" (UID: \"cfe2bc53-bda8-4d4e-8da5-521f3821826c\") " pod="openshift-service-ca/service-ca-9c57cc56f-tb9s5" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860055 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/eeb5c859-42f1-4c67-a8a2-18635447eb4d-images\") pod \"machine-config-operator-74547568cd-zjr5q\" (UID: \"eeb5c859-42f1-4c67-a8a2-18635447eb4d\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjr5q" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860070 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2ead6744-6221-4da7-b612-c1e59bab87e5-webhook-certs\") pod 
\"multus-admission-controller-857f4d67dd-8mddw\" (UID: \"2ead6744-6221-4da7-b612-c1e59bab87e5\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-8mddw" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860117 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsdc7\" (UniqueName: \"kubernetes.io/projected/e553fc00-cc5f-41e7-aeae-99be7ec861d4-kube-api-access-xsdc7\") pod \"control-plane-machine-set-operator-78cbb6b69f-cctnf\" (UID: \"e553fc00-cc5f-41e7-aeae-99be7ec861d4\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cctnf" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860150 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-installation-pull-secrets\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860173 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3e916258-6c75-413f-9d82-2ef568aa3647-proxy-tls\") pod \"machine-config-controller-84d6567774-fpdjm\" (UID: \"3e916258-6c75-413f-9d82-2ef568aa3647\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fpdjm" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860191 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72455538-0f4e-4b90-a4e8-3c990f4b89df-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-8c46q\" (UID: \"72455538-0f4e-4b90-a4e8-3c990f4b89df\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8c46q" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860245 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2twn\" (UniqueName: \"kubernetes.io/projected/eeb5c859-42f1-4c67-a8a2-18635447eb4d-kube-api-access-c2twn\") pod \"machine-config-operator-74547568cd-zjr5q\" (UID: \"eeb5c859-42f1-4c67-a8a2-18635447eb4d\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjr5q" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860266 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fcabeb27-81a2-4c9c-9f7f-f6883e3f4724-cert\") pod \"ingress-canary-j6df8\" (UID: \"fcabeb27-81a2-4c9c-9f7f-f6883e3f4724\") " pod="openshift-ingress-canary/ingress-canary-j6df8" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860282 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/2b324e48-dc6f-47db-acb5-9b590fe869cb-tmpfs\") pod \"packageserver-d55dfcdfc-ts2pk\" (UID: \"2b324e48-dc6f-47db-acb5-9b590fe869cb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ts2pk" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860326 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3d936082-c3b2-4117-9952-638b630b653a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-gqttk\" (UID: \"3d936082-c3b2-4117-9952-638b630b653a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gqttk" Jan 26 
00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860343 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bqbpq\" (UniqueName: \"kubernetes.io/projected/6f5a24d3-8dae-4970-8842-21bddef2373f-kube-api-access-bqbpq\") pod \"collect-profiles-29489760-6nbzp\" (UID: \"6f5a24d3-8dae-4970-8842-21bddef2373f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489760-6nbzp" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860359 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6a72f0b8-41a1-46ab-8ad4-d0448b255e6d-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-d8v59\" (UID: \"6a72f0b8-41a1-46ab-8ad4-d0448b255e6d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-d8v59" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860377 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fcf19955-9a00-4a50-8ce1-bd7098c45eec-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8xrbd\" (UID: \"fcf19955-9a00-4a50-8ce1-bd7098c45eec\") " pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860401 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/cfe2bc53-bda8-4d4e-8da5-521f3821826c-signing-key\") pod \"service-ca-9c57cc56f-tb9s5\" (UID: \"cfe2bc53-bda8-4d4e-8da5-521f3821826c\") " pod="openshift-service-ca/service-ca-9c57cc56f-tb9s5" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860428 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-trusted-ca\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860446 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3d936082-c3b2-4117-9952-638b630b653a-metrics-tls\") pod \"ingress-operator-5b745b69d9-gqttk\" (UID: \"3d936082-c3b2-4117-9952-638b630b653a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gqttk" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860471 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6f5a24d3-8dae-4970-8842-21bddef2373f-config-volume\") pod \"collect-profiles-29489760-6nbzp\" (UID: \"6f5a24d3-8dae-4970-8842-21bddef2373f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489760-6nbzp" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860487 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86d89d82-a826-4586-b7fb-66866bf100e9-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9jj5\" (UID: \"86d89d82-a826-4586-b7fb-66866bf100e9\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9jj5" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860505 4975 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fcf19955-9a00-4a50-8ce1-bd7098c45eec-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8xrbd\" (UID: \"fcf19955-9a00-4a50-8ce1-bd7098c45eec\") " pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860604 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vc49c\" (UniqueName: \"kubernetes.io/projected/3d936082-c3b2-4117-9952-638b630b653a-kube-api-access-vc49c\") pod \"ingress-operator-5b745b69d9-gqttk\" (UID: \"3d936082-c3b2-4117-9952-638b630b653a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gqttk" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860620 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2b324e48-dc6f-47db-acb5-9b590fe869cb-apiservice-cert\") pod \"packageserver-d55dfcdfc-ts2pk\" (UID: \"2b324e48-dc6f-47db-acb5-9b590fe869cb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ts2pk" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860639 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pjt2z\" (UniqueName: \"kubernetes.io/projected/3e916258-6c75-413f-9d82-2ef568aa3647-kube-api-access-pjt2z\") pod \"machine-config-controller-84d6567774-fpdjm\" (UID: \"3e916258-6c75-413f-9d82-2ef568aa3647\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fpdjm" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860657 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vs4lf\" (UniqueName: \"kubernetes.io/projected/86d89d82-a826-4586-b7fb-66866bf100e9-kube-api-access-vs4lf\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9jj5\" (UID: \"86d89d82-a826-4586-b7fb-66866bf100e9\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9jj5" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860693 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/eeb5c859-42f1-4c67-a8a2-18635447eb4d-proxy-tls\") pod \"machine-config-operator-74547568cd-zjr5q\" (UID: \"eeb5c859-42f1-4c67-a8a2-18635447eb4d\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjr5q" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860711 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/1ed800e9-7ccf-41d7-802e-3fb96798ed9d-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-l9vhw\" (UID: \"1ed800e9-7ccf-41d7-802e-3fb96798ed9d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-l9vhw" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860743 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkg9s\" (UniqueName: \"kubernetes.io/projected/2b324e48-dc6f-47db-acb5-9b590fe869cb-kube-api-access-bkg9s\") pod \"packageserver-d55dfcdfc-ts2pk\" (UID: \"2b324e48-dc6f-47db-acb5-9b590fe869cb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ts2pk" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860762 4975 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3e916258-6c75-413f-9d82-2ef568aa3647-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-fpdjm\" (UID: \"3e916258-6c75-413f-9d82-2ef568aa3647\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fpdjm" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860780 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rc2nr\" (UniqueName: \"kubernetes.io/projected/c4e5fa00-3bad-48f8-930e-a81d2ffb696b-kube-api-access-rc2nr\") pod \"migrator-59844c95c7-8dkw2\" (UID: \"c4e5fa00-3bad-48f8-930e-a81d2ffb696b\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8dkw2" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860835 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e553fc00-cc5f-41e7-aeae-99be7ec861d4-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-cctnf\" (UID: \"e553fc00-cc5f-41e7-aeae-99be7ec861d4\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cctnf" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860873 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3c669669-d4d5-42bf-969b-02661b7cf7a2-profile-collector-cert\") pod \"catalog-operator-68c6474976-pdn2l\" (UID: \"3c669669-d4d5-42bf-969b-02661b7cf7a2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pdn2l" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860898 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-registry-certificates\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860914 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3d936082-c3b2-4117-9952-638b630b653a-trusted-ca\") pod \"ingress-operator-5b745b69d9-gqttk\" (UID: \"3d936082-c3b2-4117-9952-638b630b653a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gqttk" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860933 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/bb028166-02f4-416d-a53e-1e96d3ad062c-socket-dir\") pod \"csi-hostpathplugin-dsbk6\" (UID: \"bb028166-02f4-416d-a53e-1e96d3ad062c\") " pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860973 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/86d89d82-a826-4586-b7fb-66866bf100e9-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9jj5\" (UID: \"86d89d82-a826-4586-b7fb-66866bf100e9\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9jj5" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.860990 4975 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/bb028166-02f4-416d-a53e-1e96d3ad062c-registration-dir\") pod \"csi-hostpathplugin-dsbk6\" (UID: \"bb028166-02f4-416d-a53e-1e96d3ad062c\") " pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.861026 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6f5a24d3-8dae-4970-8842-21bddef2373f-secret-volume\") pod \"collect-profiles-29489760-6nbzp\" (UID: \"6f5a24d3-8dae-4970-8842-21bddef2373f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489760-6nbzp" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.861060 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lc5k\" (UniqueName: \"kubernetes.io/projected/3c669669-d4d5-42bf-969b-02661b7cf7a2-kube-api-access-8lc5k\") pod \"catalog-operator-68c6474976-pdn2l\" (UID: \"3c669669-d4d5-42bf-969b-02661b7cf7a2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pdn2l" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.861077 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cc3f2a80-2278-4d49-9bfc-6a5cb026394a-serving-cert\") pod \"service-ca-operator-777779d784-vfc7w\" (UID: \"cc3f2a80-2278-4d49-9bfc-6a5cb026394a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vfc7w" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.861113 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pr7mw\" (UniqueName: \"kubernetes.io/projected/2ead6744-6221-4da7-b612-c1e59bab87e5-kube-api-access-pr7mw\") pod \"multus-admission-controller-857f4d67dd-8mddw\" (UID: \"2ead6744-6221-4da7-b612-c1e59bab87e5\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-8mddw" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.861145 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2b324e48-dc6f-47db-acb5-9b590fe869cb-webhook-cert\") pod \"packageserver-d55dfcdfc-ts2pk\" (UID: \"2b324e48-dc6f-47db-acb5-9b590fe869cb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ts2pk" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.861174 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc3f2a80-2278-4d49-9bfc-6a5cb026394a-config\") pod \"service-ca-operator-777779d784-vfc7w\" (UID: \"cc3f2a80-2278-4d49-9bfc-6a5cb026394a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vfc7w" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.861229 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t75h8\" (UniqueName: \"kubernetes.io/projected/fcf19955-9a00-4a50-8ce1-bd7098c45eec-kube-api-access-t75h8\") pod \"marketplace-operator-79b997595-8xrbd\" (UID: \"fcf19955-9a00-4a50-8ce1-bd7098c45eec\") " pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.861248 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5z72s\" (UniqueName: 
\"kubernetes.io/projected/fcabeb27-81a2-4c9c-9f7f-f6883e3f4724-kube-api-access-5z72s\") pod \"ingress-canary-j6df8\" (UID: \"fcabeb27-81a2-4c9c-9f7f-f6883e3f4724\") " pod="openshift-ingress-canary/ingress-canary-j6df8" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.861267 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpmlf\" (UniqueName: \"kubernetes.io/projected/cfe2bc53-bda8-4d4e-8da5-521f3821826c-kube-api-access-dpmlf\") pod \"service-ca-9c57cc56f-tb9s5\" (UID: \"cfe2bc53-bda8-4d4e-8da5-521f3821826c\") " pod="openshift-service-ca/service-ca-9c57cc56f-tb9s5" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.861321 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/72455538-0f4e-4b90-a4e8-3c990f4b89df-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-8c46q\" (UID: \"72455538-0f4e-4b90-a4e8-3c990f4b89df\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8c46q" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.861339 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/bb028166-02f4-416d-a53e-1e96d3ad062c-plugins-dir\") pod \"csi-hostpathplugin-dsbk6\" (UID: \"bb028166-02f4-416d-a53e-1e96d3ad062c\") " pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.861405 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72455538-0f4e-4b90-a4e8-3c990f4b89df-config\") pod \"kube-apiserver-operator-766d6c64bb-8c46q\" (UID: \"72455538-0f4e-4b90-a4e8-3c990f4b89df\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8c46q" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.861423 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jd9kx\" (UniqueName: \"kubernetes.io/projected/bb028166-02f4-416d-a53e-1e96d3ad062c-kube-api-access-jd9kx\") pod \"csi-hostpathplugin-dsbk6\" (UID: \"bb028166-02f4-416d-a53e-1e96d3ad062c\") " pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.861439 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-bound-sa-token\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.861455 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/bb028166-02f4-416d-a53e-1e96d3ad062c-mountpoint-dir\") pod \"csi-hostpathplugin-dsbk6\" (UID: \"bb028166-02f4-416d-a53e-1e96d3ad062c\") " pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" Jan 26 00:09:20 crc kubenswrapper[4975]: E0126 00:09:20.862957 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-26 00:09:21.362913482 +0000 UTC m=+145.484118976 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.863169 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a72f0b8-41a1-46ab-8ad4-d0448b255e6d-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-d8v59\" (UID: \"6a72f0b8-41a1-46ab-8ad4-d0448b255e6d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-d8v59" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.864412 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eeb5c859-42f1-4c67-a8a2-18635447eb4d-auth-proxy-config\") pod \"machine-config-operator-74547568cd-zjr5q\" (UID: \"eeb5c859-42f1-4c67-a8a2-18635447eb4d\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjr5q" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.864471 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/eeb5c859-42f1-4c67-a8a2-18635447eb4d-images\") pod \"machine-config-operator-74547568cd-zjr5q\" (UID: \"eeb5c859-42f1-4c67-a8a2-18635447eb4d\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjr5q" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.865165 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-ca-trust-extracted\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.870772 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72455538-0f4e-4b90-a4e8-3c990f4b89df-config\") pod \"kube-apiserver-operator-766d6c64bb-8c46q\" (UID: \"72455538-0f4e-4b90-a4e8-3c990f4b89df\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8c46q" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.870776 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/2b324e48-dc6f-47db-acb5-9b590fe869cb-tmpfs\") pod \"packageserver-d55dfcdfc-ts2pk\" (UID: \"2b324e48-dc6f-47db-acb5-9b590fe869cb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ts2pk" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.871278 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6f5a24d3-8dae-4970-8842-21bddef2373f-config-volume\") pod \"collect-profiles-29489760-6nbzp\" (UID: \"6f5a24d3-8dae-4970-8842-21bddef2373f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489760-6nbzp" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.874037 4975 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a72f0b8-41a1-46ab-8ad4-d0448b255e6d-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-d8v59\" (UID: \"6a72f0b8-41a1-46ab-8ad4-d0448b255e6d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-d8v59" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.874492 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-65wb9" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.876858 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3e916258-6c75-413f-9d82-2ef568aa3647-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-fpdjm\" (UID: \"3e916258-6c75-413f-9d82-2ef568aa3647\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fpdjm" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.877058 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/eeb5c859-42f1-4c67-a8a2-18635447eb4d-proxy-tls\") pod \"machine-config-operator-74547568cd-zjr5q\" (UID: \"eeb5c859-42f1-4c67-a8a2-18635447eb4d\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjr5q" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.877286 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/86d89d82-a826-4586-b7fb-66866bf100e9-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9jj5\" (UID: \"86d89d82-a826-4586-b7fb-66866bf100e9\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9jj5" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.877439 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2ead6744-6221-4da7-b612-c1e59bab87e5-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-8mddw\" (UID: \"2ead6744-6221-4da7-b612-c1e59bab87e5\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-8mddw" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.877945 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fcabeb27-81a2-4c9c-9f7f-f6883e3f4724-cert\") pod \"ingress-canary-j6df8\" (UID: \"fcabeb27-81a2-4c9c-9f7f-f6883e3f4724\") " pod="openshift-ingress-canary/ingress-canary-j6df8" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.878866 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc3f2a80-2278-4d49-9bfc-6a5cb026394a-config\") pod \"service-ca-operator-777779d784-vfc7w\" (UID: \"cc3f2a80-2278-4d49-9bfc-6a5cb026394a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vfc7w" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.878927 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fcf19955-9a00-4a50-8ce1-bd7098c45eec-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8xrbd\" (UID: \"fcf19955-9a00-4a50-8ce1-bd7098c45eec\") " pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" Jan 26 
00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.880674 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/1ed800e9-7ccf-41d7-802e-3fb96798ed9d-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-l9vhw\" (UID: \"1ed800e9-7ccf-41d7-802e-3fb96798ed9d\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-l9vhw" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.881546 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-installation-pull-secrets\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.882429 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/86d89d82-a826-4586-b7fb-66866bf100e9-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9jj5\" (UID: \"86d89d82-a826-4586-b7fb-66866bf100e9\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9jj5" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.884242 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3d936082-c3b2-4117-9952-638b630b653a-trusted-ca\") pod \"ingress-operator-5b745b69d9-gqttk\" (UID: \"3d936082-c3b2-4117-9952-638b630b653a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gqttk" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.884314 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-trusted-ca\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.884668 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/3d936082-c3b2-4117-9952-638b630b653a-metrics-tls\") pod \"ingress-operator-5b745b69d9-gqttk\" (UID: \"3d936082-c3b2-4117-9952-638b630b653a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gqttk" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.884916 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-registry-certificates\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.886096 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fcf19955-9a00-4a50-8ce1-bd7098c45eec-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8xrbd\" (UID: \"fcf19955-9a00-4a50-8ce1-bd7098c45eec\") " pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.886539 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/cc3f2a80-2278-4d49-9bfc-6a5cb026394a-serving-cert\") pod \"service-ca-operator-777779d784-vfc7w\" (UID: \"cc3f2a80-2278-4d49-9bfc-6a5cb026394a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vfc7w" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.890287 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3c669669-d4d5-42bf-969b-02661b7cf7a2-profile-collector-cert\") pod \"catalog-operator-68c6474976-pdn2l\" (UID: \"3c669669-d4d5-42bf-969b-02661b7cf7a2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pdn2l" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.890292 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-registry-tls\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.890744 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e553fc00-cc5f-41e7-aeae-99be7ec861d4-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-cctnf\" (UID: \"e553fc00-cc5f-41e7-aeae-99be7ec861d4\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cctnf" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.892719 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72455538-0f4e-4b90-a4e8-3c990f4b89df-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-8c46q\" (UID: \"72455538-0f4e-4b90-a4e8-3c990f4b89df\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8c46q" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.893915 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2b324e48-dc6f-47db-acb5-9b590fe869cb-apiservice-cert\") pod \"packageserver-d55dfcdfc-ts2pk\" (UID: \"2b324e48-dc6f-47db-acb5-9b590fe869cb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ts2pk" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.898062 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3c669669-d4d5-42bf-969b-02661b7cf7a2-srv-cert\") pod \"catalog-operator-68c6474976-pdn2l\" (UID: \"3c669669-d4d5-42bf-969b-02661b7cf7a2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pdn2l" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.898607 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2b324e48-dc6f-47db-acb5-9b590fe869cb-webhook-cert\") pod \"packageserver-d55dfcdfc-ts2pk\" (UID: \"2b324e48-dc6f-47db-acb5-9b590fe869cb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ts2pk" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.904006 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vlqg2\" (UniqueName: \"kubernetes.io/projected/1ed800e9-7ccf-41d7-802e-3fb96798ed9d-kube-api-access-vlqg2\") pod \"package-server-manager-789f6589d5-l9vhw\" (UID: \"1ed800e9-7ccf-41d7-802e-3fb96798ed9d\") " 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-l9vhw" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.907358 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vs4lf\" (UniqueName: \"kubernetes.io/projected/86d89d82-a826-4586-b7fb-66866bf100e9-kube-api-access-vs4lf\") pod \"kube-storage-version-migrator-operator-b67b599dd-d9jj5\" (UID: \"86d89d82-a826-4586-b7fb-66866bf100e9\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9jj5" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.915797 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6f5a24d3-8dae-4970-8842-21bddef2373f-secret-volume\") pod \"collect-profiles-29489760-6nbzp\" (UID: \"6f5a24d3-8dae-4970-8842-21bddef2373f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489760-6nbzp" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.916376 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3e916258-6c75-413f-9d82-2ef568aa3647-proxy-tls\") pod \"machine-config-controller-84d6567774-fpdjm\" (UID: \"3e916258-6c75-413f-9d82-2ef568aa3647\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fpdjm" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.925616 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xsdc7\" (UniqueName: \"kubernetes.io/projected/e553fc00-cc5f-41e7-aeae-99be7ec861d4-kube-api-access-xsdc7\") pod \"control-plane-machine-set-operator-78cbb6b69f-cctnf\" (UID: \"e553fc00-cc5f-41e7-aeae-99be7ec861d4\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cctnf" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.927110 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhrs4\" (UniqueName: \"kubernetes.io/projected/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-kube-api-access-fhrs4\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.944580 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9jj5" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.960627 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-gbxpn" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.963032 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jd9kx\" (UniqueName: \"kubernetes.io/projected/bb028166-02f4-416d-a53e-1e96d3ad062c-kube-api-access-jd9kx\") pod \"csi-hostpathplugin-dsbk6\" (UID: \"bb028166-02f4-416d-a53e-1e96d3ad062c\") " pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.963085 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/bb028166-02f4-416d-a53e-1e96d3ad062c-mountpoint-dir\") pod \"csi-hostpathplugin-dsbk6\" (UID: \"bb028166-02f4-416d-a53e-1e96d3ad062c\") " pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.963120 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.963150 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/bb028166-02f4-416d-a53e-1e96d3ad062c-csi-data-dir\") pod \"csi-hostpathplugin-dsbk6\" (UID: \"bb028166-02f4-416d-a53e-1e96d3ad062c\") " pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.963185 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/cfe2bc53-bda8-4d4e-8da5-521f3821826c-signing-cabundle\") pod \"service-ca-9c57cc56f-tb9s5\" (UID: \"cfe2bc53-bda8-4d4e-8da5-521f3821826c\") " pod="openshift-service-ca/service-ca-9c57cc56f-tb9s5" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.963262 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/cfe2bc53-bda8-4d4e-8da5-521f3821826c-signing-key\") pod \"service-ca-9c57cc56f-tb9s5\" (UID: \"cfe2bc53-bda8-4d4e-8da5-521f3821826c\") " pod="openshift-service-ca/service-ca-9c57cc56f-tb9s5" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.963340 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/bb028166-02f4-416d-a53e-1e96d3ad062c-socket-dir\") pod \"csi-hostpathplugin-dsbk6\" (UID: \"bb028166-02f4-416d-a53e-1e96d3ad062c\") " pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.963361 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/bb028166-02f4-416d-a53e-1e96d3ad062c-registration-dir\") pod \"csi-hostpathplugin-dsbk6\" (UID: \"bb028166-02f4-416d-a53e-1e96d3ad062c\") " pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.963453 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpmlf\" (UniqueName: 
\"kubernetes.io/projected/cfe2bc53-bda8-4d4e-8da5-521f3821826c-kube-api-access-dpmlf\") pod \"service-ca-9c57cc56f-tb9s5\" (UID: \"cfe2bc53-bda8-4d4e-8da5-521f3821826c\") " pod="openshift-service-ca/service-ca-9c57cc56f-tb9s5" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.963486 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/bb028166-02f4-416d-a53e-1e96d3ad062c-plugins-dir\") pod \"csi-hostpathplugin-dsbk6\" (UID: \"bb028166-02f4-416d-a53e-1e96d3ad062c\") " pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.963910 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/bb028166-02f4-416d-a53e-1e96d3ad062c-plugins-dir\") pod \"csi-hostpathplugin-dsbk6\" (UID: \"bb028166-02f4-416d-a53e-1e96d3ad062c\") " pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.964489 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/bb028166-02f4-416d-a53e-1e96d3ad062c-mountpoint-dir\") pod \"csi-hostpathplugin-dsbk6\" (UID: \"bb028166-02f4-416d-a53e-1e96d3ad062c\") " pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" Jan 26 00:09:20 crc kubenswrapper[4975]: E0126 00:09:20.964793 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:21.464778979 +0000 UTC m=+145.585984473 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.965118 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/bb028166-02f4-416d-a53e-1e96d3ad062c-csi-data-dir\") pod \"csi-hostpathplugin-dsbk6\" (UID: \"bb028166-02f4-416d-a53e-1e96d3ad062c\") " pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.966216 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/cfe2bc53-bda8-4d4e-8da5-521f3821826c-signing-cabundle\") pod \"service-ca-9c57cc56f-tb9s5\" (UID: \"cfe2bc53-bda8-4d4e-8da5-521f3821826c\") " pod="openshift-service-ca/service-ca-9c57cc56f-tb9s5" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.966589 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pjt2z\" (UniqueName: \"kubernetes.io/projected/3e916258-6c75-413f-9d82-2ef568aa3647-kube-api-access-pjt2z\") pod \"machine-config-controller-84d6567774-fpdjm\" (UID: \"3e916258-6c75-413f-9d82-2ef568aa3647\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fpdjm" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.967007 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"registration-dir\" (UniqueName: \"kubernetes.io/host-path/bb028166-02f4-416d-a53e-1e96d3ad062c-registration-dir\") pod \"csi-hostpathplugin-dsbk6\" (UID: \"bb028166-02f4-416d-a53e-1e96d3ad062c\") " pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.967098 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/bb028166-02f4-416d-a53e-1e96d3ad062c-socket-dir\") pod \"csi-hostpathplugin-dsbk6\" (UID: \"bb028166-02f4-416d-a53e-1e96d3ad062c\") " pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.969998 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/cfe2bc53-bda8-4d4e-8da5-521f3821826c-signing-key\") pod \"service-ca-9c57cc56f-tb9s5\" (UID: \"cfe2bc53-bda8-4d4e-8da5-521f3821826c\") " pod="openshift-service-ca/service-ca-9c57cc56f-tb9s5" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.974407 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gmwj\" (UniqueName: \"kubernetes.io/projected/cc3f2a80-2278-4d49-9bfc-6a5cb026394a-kube-api-access-5gmwj\") pod \"service-ca-operator-777779d784-vfc7w\" (UID: \"cc3f2a80-2278-4d49-9bfc-6a5cb026394a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-vfc7w" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.990984 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fpdjm" Jan 26 00:09:20 crc kubenswrapper[4975]: I0126 00:09:20.997297 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2twn\" (UniqueName: \"kubernetes.io/projected/eeb5c859-42f1-4c67-a8a2-18635447eb4d-kube-api-access-c2twn\") pod \"machine-config-operator-74547568cd-zjr5q\" (UID: \"eeb5c859-42f1-4c67-a8a2-18635447eb4d\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjr5q" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.007654 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/72455538-0f4e-4b90-a4e8-3c990f4b89df-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-8c46q\" (UID: \"72455538-0f4e-4b90-a4e8-3c990f4b89df\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8c46q" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.009409 4975 generic.go:334] "Generic (PLEG): container finished" podID="41509c7e-a96a-44ce-a24e-dbd2f80386b0" containerID="696066b21dadc72152f70797144a8485e28847d615c0d487e581507718624fdc" exitCode=0 Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.009569 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" event={"ID":"41509c7e-a96a-44ce-a24e-dbd2f80386b0","Type":"ContainerDied","Data":"696066b21dadc72152f70797144a8485e28847d615c0d487e581507718624fdc"} Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.011680 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-9hgnx"] Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.034023 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q6z7v" 
event={"ID":"7aada3dd-f7ae-4129-b168-366122ad0ef1","Type":"ContainerStarted","Data":"cf0e67cabe732811fa475af4d3d2e01624df9013fe937aa8491eabe46f4aa962"} Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.049137 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-l9vhw" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.052350 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lc5k\" (UniqueName: \"kubernetes.io/projected/3c669669-d4d5-42bf-969b-02661b7cf7a2-kube-api-access-8lc5k\") pod \"catalog-operator-68c6474976-pdn2l\" (UID: \"3c669669-d4d5-42bf-969b-02661b7cf7a2\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pdn2l" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.055369 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vc49c\" (UniqueName: \"kubernetes.io/projected/3d936082-c3b2-4117-9952-638b630b653a-kube-api-access-vc49c\") pod \"ingress-operator-5b745b69d9-gqttk\" (UID: \"3d936082-c3b2-4117-9952-638b630b653a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gqttk" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.059714 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-bound-sa-token\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.066188 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:21 crc kubenswrapper[4975]: E0126 00:09:21.066450 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:21.566412211 +0000 UTC m=+145.687617755 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:21 crc kubenswrapper[4975]: W0126 00:09:21.085335 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podea760b21_fb4c_4e8a_9d07_094885818cac.slice/crio-2507e086d43b9c31d10f004ef3ef1a9b8d242d59279d9a0a88c5d55c1bfe8c3d WatchSource:0}: Error finding container 2507e086d43b9c31d10f004ef3ef1a9b8d242d59279d9a0a88c5d55c1bfe8c3d: Status 404 returned error can't find the container with id 2507e086d43b9c31d10f004ef3ef1a9b8d242d59279d9a0a88c5d55c1bfe8c3d Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.086422 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-vfc7w" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.086783 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" event={"ID":"efda37e1-c7f8-4e41-a9a4-e5191fe797ab","Type":"ContainerStarted","Data":"b0fa83b918e58692c13449107fc46c43ebf7f665fd5ba6b6e9501616253addb6"} Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.086826 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" event={"ID":"efda37e1-c7f8-4e41-a9a4-e5191fe797ab","Type":"ContainerStarted","Data":"79e6eb811c6113543f1f12b2e3a5f40372e6fb737534158ec3f9ab5b74809c0a"} Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.092191 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vjg2l" event={"ID":"c4376008-2a48-499e-a548-0b5f233e3af7","Type":"ContainerStarted","Data":"f15726aff5d53ad0ecf278b469d0fb7897090fc0a56a6593e981399bc4994720"} Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.092226 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vjg2l" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.096545 4975 patch_prober.go:28] interesting pod/downloads-7954f5f757-cbkrr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.096583 4975 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-cbkrr" podUID="c1bf1b13-966b-4a74-935c-47af817d7777" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.101450 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3d936082-c3b2-4117-9952-638b630b653a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-gqttk\" (UID: \"3d936082-c3b2-4117-9952-638b630b653a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gqttk" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.102317 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.105785 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.111509 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.142948 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bqbpq\" (UniqueName: \"kubernetes.io/projected/6f5a24d3-8dae-4970-8842-21bddef2373f-kube-api-access-bqbpq\") pod \"collect-profiles-29489760-6nbzp\" (UID: \"6f5a24d3-8dae-4970-8842-21bddef2373f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489760-6nbzp" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.157374 4975 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6a72f0b8-41a1-46ab-8ad4-d0448b255e6d-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-d8v59\" (UID: \"6a72f0b8-41a1-46ab-8ad4-d0448b255e6d\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-d8v59" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.168409 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:21 crc kubenswrapper[4975]: E0126 00:09:21.182145 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:21.682127897 +0000 UTC m=+145.803333391 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.204340 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8c46q" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.208232 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkg9s\" (UniqueName: \"kubernetes.io/projected/2b324e48-dc6f-47db-acb5-9b590fe869cb-kube-api-access-bkg9s\") pod \"packageserver-d55dfcdfc-ts2pk\" (UID: \"2b324e48-dc6f-47db-acb5-9b590fe869cb\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ts2pk" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.218036 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjr5q" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.218727 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t75h8\" (UniqueName: \"kubernetes.io/projected/fcf19955-9a00-4a50-8ce1-bd7098c45eec-kube-api-access-t75h8\") pod \"marketplace-operator-79b997595-8xrbd\" (UID: \"fcf19955-9a00-4a50-8ce1-bd7098c45eec\") " pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.219587 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cctnf" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.225521 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pdn2l" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.241762 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ts2pk" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.241766 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rc2nr\" (UniqueName: \"kubernetes.io/projected/c4e5fa00-3bad-48f8-930e-a81d2ffb696b-kube-api-access-rc2nr\") pod \"migrator-59844c95c7-8dkw2\" (UID: \"c4e5fa00-3bad-48f8-930e-a81d2ffb696b\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8dkw2" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.245632 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pr7mw\" (UniqueName: \"kubernetes.io/projected/2ead6744-6221-4da7-b612-c1e59bab87e5-kube-api-access-pr7mw\") pod \"multus-admission-controller-857f4d67dd-8mddw\" (UID: \"2ead6744-6221-4da7-b612-c1e59bab87e5\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-8mddw" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.269717 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:21 crc kubenswrapper[4975]: E0126 00:09:21.270351 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:21.770331079 +0000 UTC m=+145.891536573 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.280559 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5z72s\" (UniqueName: \"kubernetes.io/projected/fcabeb27-81a2-4c9c-9f7f-f6883e3f4724-kube-api-access-5z72s\") pod \"ingress-canary-j6df8\" (UID: \"fcabeb27-81a2-4c9c-9f7f-f6883e3f4724\") " pod="openshift-ingress-canary/ingress-canary-j6df8" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.286971 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jd9kx\" (UniqueName: \"kubernetes.io/projected/bb028166-02f4-416d-a53e-1e96d3ad062c-kube-api-access-jd9kx\") pod \"csi-hostpathplugin-dsbk6\" (UID: \"bb028166-02f4-416d-a53e-1e96d3ad062c\") " pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.287324 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29489760-6nbzp" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.296718 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpmlf\" (UniqueName: \"kubernetes.io/projected/cfe2bc53-bda8-4d4e-8da5-521f3821826c-kube-api-access-dpmlf\") pod \"service-ca-9c57cc56f-tb9s5\" (UID: \"cfe2bc53-bda8-4d4e-8da5-521f3821826c\") " pod="openshift-service-ca/service-ca-9c57cc56f-tb9s5" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.325219 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8dkw2" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.361802 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-tb9s5" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.368086 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.372594 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gqttk" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.391534 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:21 crc kubenswrapper[4975]: E0126 00:09:21.391898 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:21.89188546 +0000 UTC m=+146.013090954 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.403382 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-j6df8" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.446022 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-d8v59" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.455652 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.470562 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-8mddw" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.491987 4975 csr.go:261] certificate signing request csr-jvh2d is approved, waiting to be issued Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.492713 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:21 crc kubenswrapper[4975]: E0126 00:09:21.493123 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:21.993108452 +0000 UTC m=+146.114313946 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.493970 4975 csr.go:257] certificate signing request csr-jvh2d is issued Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.595566 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:21 crc kubenswrapper[4975]: E0126 00:09:21.596809 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:22.096790931 +0000 UTC m=+146.217996425 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.679818 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-wrh4k" podStartSLOduration=126.679798543 podStartE2EDuration="2m6.679798543s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:21.678509153 +0000 UTC m=+145.799714647" watchObservedRunningTime="2026-01-26 00:09:21.679798543 +0000 UTC m=+145.801004037" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.696419 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:21 crc kubenswrapper[4975]: E0126 00:09:21.697019 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:22.19699806 +0000 UTC m=+146.318203554 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.767222 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-cbkrr" podStartSLOduration=126.767204847 podStartE2EDuration="2m6.767204847s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:21.765438646 +0000 UTC m=+145.886644140" watchObservedRunningTime="2026-01-26 00:09:21.767204847 +0000 UTC m=+145.888410331" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.798648 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:21 crc kubenswrapper[4975]: E0126 00:09:21.798946 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-01-26 00:09:22.298934408 +0000 UTC m=+146.420139902 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.828990 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-gqm5h" podStartSLOduration=126.82897484 podStartE2EDuration="2m6.82897484s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:21.828025248 +0000 UTC m=+145.949230732" watchObservedRunningTime="2026-01-26 00:09:21.82897484 +0000 UTC m=+145.950180334" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.862547 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp" podStartSLOduration=125.862531473 podStartE2EDuration="2m5.862531473s" podCreationTimestamp="2026-01-26 00:07:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:21.861553751 +0000 UTC m=+145.982759245" watchObservedRunningTime="2026-01-26 00:09:21.862531473 +0000 UTC m=+145.983736967" Jan 26 00:09:21 crc kubenswrapper[4975]: I0126 00:09:21.901772 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:21 crc kubenswrapper[4975]: E0126 00:09:21.902159 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:22.402143936 +0000 UTC m=+146.523349430 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.005263 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:22 crc kubenswrapper[4975]: E0126 00:09:22.006113 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:22.506101401 +0000 UTC m=+146.627306885 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.038946 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" podStartSLOduration=127.038897617 podStartE2EDuration="2m7.038897617s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:22.034921295 +0000 UTC m=+146.156126789" watchObservedRunningTime="2026-01-26 00:09:22.038897617 +0000 UTC m=+146.160103111" Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.109294 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.109633 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.109705 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:09:22 crc kubenswrapper[4975]: E0126 00:09:22.110330 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:22.610301222 +0000 UTC m=+146.731506716 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.118032 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.118243 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.190497 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.216632 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-6hw84" event={"ID":"ab6d30fc-43fe-46af-8d7c-e67f3b3811d2","Type":"ContainerStarted","Data":"bba13daa4963e1a50a5fe4c73bcbb2014fe6a5434d67880dfddecd3a1f886c01"} Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.216709 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-9hgnx" event={"ID":"bc715eca-0758-4aa8-90e6-59b28717a44c","Type":"ContainerStarted","Data":"6f1d7d70ce7a8761eabc3fb4a56272920927eb2cd77d308ae4c2be9b11d9d109"} Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.216851 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-b4hpk"] Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.221401 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.221534 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.222047 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:22 crc kubenswrapper[4975]: E0126 00:09:22.222402 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:22.722385174 +0000 UTC m=+146.843590668 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.236526 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.237424 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.247608 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-8r7mh" podStartSLOduration=127.247589405 podStartE2EDuration="2m7.247589405s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:22.233253195 +0000 UTC m=+146.354458689" watchObservedRunningTime="2026-01-26 00:09:22.247589405 +0000 UTC m=+146.368794899" Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.254009 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" event={"ID":"41509c7e-a96a-44ce-a24e-dbd2f80386b0","Type":"ContainerStarted","Data":"501548abae16e1a9dbbc0626fad4a2c3d0ab01a7d1b2d8ff24dab46acfb33713"} Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.308846 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-gbxpn" event={"ID":"ea760b21-fb4c-4e8a-9d07-094885818cac","Type":"ContainerStarted","Data":"45a29a66231f31f3b0bebce37d52683e2174fe69b8a698ab58faa89b018eaba3"} Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.308884 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-gbxpn" event={"ID":"ea760b21-fb4c-4e8a-9d07-094885818cac","Type":"ContainerStarted","Data":"2507e086d43b9c31d10f004ef3ef1a9b8d242d59279d9a0a88c5d55c1bfe8c3d"} Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.308963 4975 patch_prober.go:28] interesting pod/downloads-7954f5f757-cbkrr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.309040 4975 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-cbkrr" podUID="c1bf1b13-966b-4a74-935c-47af817d7777" containerName="download-server" probeResult="failure" output="Get 
\"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.325678 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:22 crc kubenswrapper[4975]: E0126 00:09:22.326795 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:22.826775479 +0000 UTC m=+146.947980963 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.375925 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m8qff"] Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.397562 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" podStartSLOduration=127.397542099 podStartE2EDuration="2m7.397542099s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:22.379887752 +0000 UTC m=+146.501093236" watchObservedRunningTime="2026-01-26 00:09:22.397542099 +0000 UTC m=+146.518747593" Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.400423 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-65wb9"] Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.445174 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:22 crc kubenswrapper[4975]: E0126 00:09:22.447673 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:22.947658234 +0000 UTC m=+147.068863738 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.474193 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.512385 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-01-26 00:04:21 +0000 UTC, rotation deadline is 2026-11-10 20:32:12.869556497 +0000 UTC Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.512418 4975 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 6932h22m50.357141201s for next certificate rotation Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.513645 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-q6z7v" podStartSLOduration=127.513631774 podStartE2EDuration="2m7.513631774s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:22.511779311 +0000 UTC m=+146.632984795" watchObservedRunningTime="2026-01-26 00:09:22.513631774 +0000 UTC m=+146.634837268" Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.532472 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.547526 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:22 crc kubenswrapper[4975]: E0126 00:09:22.547991 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:23.047972315 +0000 UTC m=+147.169177799 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.638975 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zdp7w" podStartSLOduration=127.638951801 podStartE2EDuration="2m7.638951801s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:22.63328528 +0000 UTC m=+146.754490774" watchObservedRunningTime="2026-01-26 00:09:22.638951801 +0000 UTC m=+146.760157295" Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.645176 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zdfmk"] Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.649384 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:22 crc kubenswrapper[4975]: E0126 00:09:22.649724 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:23.149712099 +0000 UTC m=+147.270917593 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.665800 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-pruner-29489760-c9d48" podStartSLOduration=127.665723608 podStartE2EDuration="2m7.665723608s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:22.665326949 +0000 UTC m=+146.786532443" watchObservedRunningTime="2026-01-26 00:09:22.665723608 +0000 UTC m=+146.786929102" Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.709907 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-xnxsr"] Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.750831 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:22 crc kubenswrapper[4975]: E0126 00:09:22.751360 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:23.25134 +0000 UTC m=+147.372545484 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.781210 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9jj5"] Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.786215 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-b8p7w" podStartSLOduration=127.786187923 podStartE2EDuration="2m7.786187923s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:22.776251984 +0000 UTC m=+146.897457468" watchObservedRunningTime="2026-01-26 00:09:22.786187923 +0000 UTC m=+146.907393417" Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.811856 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-6hw84" Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.853593 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:22 crc kubenswrapper[4975]: E0126 00:09:22.856469 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:23.356445042 +0000 UTC m=+147.477650546 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.884254 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vjg2l" podStartSLOduration=127.884235312 podStartE2EDuration="2m7.884235312s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:22.882576784 +0000 UTC m=+147.003782288" watchObservedRunningTime="2026-01-26 00:09:22.884235312 +0000 UTC m=+147.005440806" Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.950763 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-2rcjc" podStartSLOduration=127.950716204 podStartE2EDuration="2m7.950716204s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:22.923818694 +0000 UTC m=+147.045024188" watchObservedRunningTime="2026-01-26 00:09:22.950716204 +0000 UTC m=+147.071921698" Jan 26 00:09:22 crc kubenswrapper[4975]: I0126 00:09:22.955237 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:22 crc kubenswrapper[4975]: E0126 00:09:22.955774 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:23.455715799 +0000 UTC m=+147.576921293 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:23 crc kubenswrapper[4975]: W0126 00:09:23.053942 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc08c3cbe_5ec9_4aaa_a74a_bccfb6a427fa.slice/crio-d9599a28263a7f46817a84a3276a9ca4f65352e83433307416668bc5363d3116 WatchSource:0}: Error finding container d9599a28263a7f46817a84a3276a9ca4f65352e83433307416668bc5363d3116: Status 404 returned error can't find the container with id d9599a28263a7f46817a84a3276a9ca4f65352e83433307416668bc5363d3116 Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.059379 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:23 crc kubenswrapper[4975]: E0126 00:09:23.059826 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:23.559813488 +0000 UTC m=+147.681018982 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.147680 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-xh2mk" podStartSLOduration=128.147647091 podStartE2EDuration="2m8.147647091s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:23.131003148 +0000 UTC m=+147.252208642" watchObservedRunningTime="2026-01-26 00:09:23.147647091 +0000 UTC m=+147.268852585" Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.161169 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:23 crc kubenswrapper[4975]: E0126 00:09:23.162080 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-26 00:09:23.662057813 +0000 UTC m=+147.783263307 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.266603 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:23 crc kubenswrapper[4975]: E0126 00:09:23.267141 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:23.767115374 +0000 UTC m=+147.888320868 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.325965 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-65wb9" event={"ID":"b5e1ff61-8c29-474a-bf64-f9a09b1d166e","Type":"ContainerStarted","Data":"a326498b58cb03c4a797979e76d3dc8672d670712231063047a373f7c2f451d5"} Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.329817 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-xnxsr" event={"ID":"c08c3cbe-5ec9-4aaa-a74a-bccfb6a427fa","Type":"ContainerStarted","Data":"d9599a28263a7f46817a84a3276a9ca4f65352e83433307416668bc5363d3116"} Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.344963 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-9hgnx" event={"ID":"bc715eca-0758-4aa8-90e6-59b28717a44c","Type":"ContainerStarted","Data":"acbe42646bb9be56525ddaedfd25e1628f4befb732bebbbc44e7fd1c4e923d23"} Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.345022 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-9hgnx" event={"ID":"bc715eca-0758-4aa8-90e6-59b28717a44c","Type":"ContainerStarted","Data":"7185af6a2ba0a53d600d0fc35216f39e42722630bf9a2ed188867ab6d35d8f7a"} Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.345213 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-9hgnx" Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.347445 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9jj5" 
event={"ID":"86d89d82-a826-4586-b7fb-66866bf100e9","Type":"ContainerStarted","Data":"a874fb9e017f43b03af907ce473c799027ed2920b03fa2022e31060eef974f08"} Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.349150 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m8qff" event={"ID":"42a9c706-06f8-489f-8b1a-769b1101a2d7","Type":"ContainerStarted","Data":"be5ef2abb76bf7fa532354827672309df6326a0c973c6a975ebdaa3579649631"} Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.349180 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m8qff" event={"ID":"42a9c706-06f8-489f-8b1a-769b1101a2d7","Type":"ContainerStarted","Data":"bd67e52a21c44a4530fae046438782bd763cbfc9781a47a6c31fdf6282926dfc"} Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.363937 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zdfmk" event={"ID":"254f6b75-68d4-421f-81a4-1d78e32db94b","Type":"ContainerStarted","Data":"1a54adfac0cd72c8f8918944abce816833426f7b94b228f5890ce08f9c199b0e"} Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.364019 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zdfmk" event={"ID":"254f6b75-68d4-421f-81a4-1d78e32db94b","Type":"ContainerStarted","Data":"0ec750487282bbad65891d0ae19b7b423822b2c2773c0c4dc95c278f2713e58f"} Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.364707 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zdfmk" Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.368149 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:23 crc kubenswrapper[4975]: E0126 00:09:23.368495 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:23.868479259 +0000 UTC m=+147.989684753 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.369828 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-b4hpk" event={"ID":"51627ad8-bcac-4c0e-934d-b99aa94b87ca","Type":"ContainerStarted","Data":"f88be0a9dc50e408090ec5e5b47a617fca8b60560d0793ce063e12eb6eb5c392"} Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.369894 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-b4hpk" event={"ID":"51627ad8-bcac-4c0e-934d-b99aa94b87ca","Type":"ContainerStarted","Data":"d013cf43f5ea6f05d534fe3f24c5bf0bb2cfa1e604386bbda934770232cfe3d1"} Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.374705 4975 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-zdfmk container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.374805 4975 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zdfmk" podUID="254f6b75-68d4-421f-81a4-1d78e32db94b" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.379900 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-6hw84" event={"ID":"ab6d30fc-43fe-46af-8d7c-e67f3b3811d2","Type":"ContainerStarted","Data":"ab1dcb908b7394fcd2e72c8b97d8d4b1c1aaef51b06d14c90293903e4f8a5e93"} Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.469546 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:23 crc kubenswrapper[4975]: E0126 00:09:23.473556 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:23.97353818 +0000 UTC m=+148.094743664 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.498577 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" podStartSLOduration=128.498562116 podStartE2EDuration="2m8.498562116s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:23.498198538 +0000 UTC m=+147.619404032" watchObservedRunningTime="2026-01-26 00:09:23.498562116 +0000 UTC m=+147.619767610" Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.570602 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:23 crc kubenswrapper[4975]: E0126 00:09:23.572414 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:24.072399277 +0000 UTC m=+148.193604771 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.574842 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-m8qff" podStartSLOduration=128.574823433 podStartE2EDuration="2m8.574823433s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:23.568833665 +0000 UTC m=+147.690039159" watchObservedRunningTime="2026-01-26 00:09:23.574823433 +0000 UTC m=+147.696028927" Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.604124 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" podStartSLOduration=127.604106048 podStartE2EDuration="2m7.604106048s" podCreationTimestamp="2026-01-26 00:07:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:23.600495325 +0000 UTC m=+147.721700819" watchObservedRunningTime="2026-01-26 00:09:23.604106048 +0000 UTC m=+147.725311542" Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.642180 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-6hw84" podStartSLOduration=128.642163895 podStartE2EDuration="2m8.642163895s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:23.639271928 +0000 UTC m=+147.760477422" watchObservedRunningTime="2026-01-26 00:09:23.642163895 +0000 UTC m=+147.763369389" Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.665932 4975 patch_prober.go:28] interesting pod/router-default-5444994796-6hw84 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 00:09:23 crc kubenswrapper[4975]: [-]has-synced failed: reason withheld Jan 26 00:09:23 crc kubenswrapper[4975]: [+]process-running ok Jan 26 00:09:23 crc kubenswrapper[4975]: healthz check failed Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.666060 4975 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6hw84" podUID="ab6d30fc-43fe-46af-8d7c-e67f3b3811d2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.669921 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-gbxpn" podStartSLOduration=5.669888183 podStartE2EDuration="5.669888183s" podCreationTimestamp="2026-01-26 00:09:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:23.666418234 
+0000 UTC m=+147.787623738" watchObservedRunningTime="2026-01-26 00:09:23.669888183 +0000 UTC m=+147.791093677" Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.673117 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:23 crc kubenswrapper[4975]: E0126 00:09:23.673483 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:24.173462476 +0000 UTC m=+148.294667970 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.690962 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-9hgnx" podStartSLOduration=5.690941329 podStartE2EDuration="5.690941329s" podCreationTimestamp="2026-01-26 00:09:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:23.690504358 +0000 UTC m=+147.811709852" watchObservedRunningTime="2026-01-26 00:09:23.690941329 +0000 UTC m=+147.812146823" Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.728427 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-b4hpk" podStartSLOduration=128.728404952 podStartE2EDuration="2m8.728404952s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:23.725146007 +0000 UTC m=+147.846351501" watchObservedRunningTime="2026-01-26 00:09:23.728404952 +0000 UTC m=+147.849610446" Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.760766 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zdfmk" podStartSLOduration=128.760749147 podStartE2EDuration="2m8.760749147s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:23.760280866 +0000 UTC m=+147.881486360" watchObservedRunningTime="2026-01-26 00:09:23.760749147 +0000 UTC m=+147.881954631" Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.780486 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:23 crc 
kubenswrapper[4975]: E0126 00:09:23.780970 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:24.280948782 +0000 UTC m=+148.402154276 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.813110 4975 patch_prober.go:28] interesting pod/router-default-5444994796-6hw84 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 00:09:23 crc kubenswrapper[4975]: [-]has-synced failed: reason withheld Jan 26 00:09:23 crc kubenswrapper[4975]: [+]process-running ok Jan 26 00:09:23 crc kubenswrapper[4975]: healthz check failed Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.813158 4975 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6hw84" podUID="ab6d30fc-43fe-46af-8d7c-e67f3b3811d2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.881513 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:23 crc kubenswrapper[4975]: E0126 00:09:23.881793 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:24.381783226 +0000 UTC m=+148.502988720 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.918836 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.920163 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:23 crc kubenswrapper[4975]: I0126 00:09:23.984193 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:23 crc kubenswrapper[4975]: E0126 00:09:23.985511 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:24.485482955 +0000 UTC m=+148.606688449 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.055362 4975 patch_prober.go:28] interesting pod/apiserver-76f77b778f-xmsj9 container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Jan 26 00:09:24 crc kubenswrapper[4975]: [+]log ok Jan 26 00:09:24 crc kubenswrapper[4975]: [+]etcd ok Jan 26 00:09:24 crc kubenswrapper[4975]: [+]poststarthook/start-apiserver-admission-initializer ok Jan 26 00:09:24 crc kubenswrapper[4975]: [+]poststarthook/generic-apiserver-start-informers ok Jan 26 00:09:24 crc kubenswrapper[4975]: [+]poststarthook/max-in-flight-filter ok Jan 26 00:09:24 crc kubenswrapper[4975]: [+]poststarthook/storage-object-count-tracker-hook ok Jan 26 00:09:24 crc kubenswrapper[4975]: [+]poststarthook/image.openshift.io-apiserver-caches ok Jan 26 00:09:24 crc kubenswrapper[4975]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Jan 26 00:09:24 crc kubenswrapper[4975]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Jan 26 00:09:24 crc kubenswrapper[4975]: [+]poststarthook/project.openshift.io-projectcache ok Jan 26 00:09:24 crc kubenswrapper[4975]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Jan 26 00:09:24 crc kubenswrapper[4975]: [+]poststarthook/openshift.io-startinformers ok Jan 26 00:09:24 crc kubenswrapper[4975]: 
[+]poststarthook/openshift.io-restmapperupdater ok Jan 26 00:09:24 crc kubenswrapper[4975]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Jan 26 00:09:24 crc kubenswrapper[4975]: livez check failed Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.055415 4975 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" podUID="efda37e1-c7f8-4e41-a9a4-e5191fe797ab" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.087025 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:24 crc kubenswrapper[4975]: E0126 00:09:24.087352 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:24.587340232 +0000 UTC m=+148.708545726 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.189423 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:24 crc kubenswrapper[4975]: E0126 00:09:24.189957 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:24.689932305 +0000 UTC m=+148.811137799 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.262428 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-j6df8"] Jan 26 00:09:24 crc kubenswrapper[4975]: W0126 00:09:24.279193 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfcabeb27_81a2_4c9c_9f7f_f6883e3f4724.slice/crio-d4f27d793d87cb3863cdc211134219e5b50a7b8444d8c8ca8dfa0536d95c8460 WatchSource:0}: Error finding container d4f27d793d87cb3863cdc211134219e5b50a7b8444d8c8ca8dfa0536d95c8460: Status 404 returned error can't find the container with id d4f27d793d87cb3863cdc211134219e5b50a7b8444d8c8ca8dfa0536d95c8460 Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.298223 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:24 crc kubenswrapper[4975]: E0126 00:09:24.298644 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:24.798624159 +0000 UTC m=+148.919829653 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.322428 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-gqttk"] Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.333269 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-fpdjm"] Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.385081 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.385151 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.398832 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:24 crc kubenswrapper[4975]: E0126 00:09:24.399227 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:24.899184886 +0000 UTC m=+149.020390370 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.399398 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:24 crc kubenswrapper[4975]: E0126 00:09:24.399946 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:24.899922813 +0000 UTC m=+149.021128307 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.401862 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-vfc7w"] Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.415092 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-zjr5q"] Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.417557 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vjg2l" Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.433864 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ts2pk"] Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.434452 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.446700 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-d8v59"] Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.446776 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-8dkw2"] Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.464639 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9jj5" event={"ID":"86d89d82-a826-4586-b7fb-66866bf100e9","Type":"ContainerStarted","Data":"519f633cb0036f7dcfdc36c3ff09238dc5ddb4ebb4f8efab7d4ecc79e13f88e0"} Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.469962 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-dsbk6"] Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.472260 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-j6df8" event={"ID":"fcabeb27-81a2-4c9c-9f7f-f6883e3f4724","Type":"ContainerStarted","Data":"d4f27d793d87cb3863cdc211134219e5b50a7b8444d8c8ca8dfa0536d95c8460"} Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.472853 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-tb9s5"] Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.474359 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-65wb9" event={"ID":"b5e1ff61-8c29-474a-bf64-f9a09b1d166e","Type":"ContainerStarted","Data":"2251d1812d3d54b365873f3622eb989826721ee1f67587e45eb2d4f5c9f5ecb3"} Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.475333 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-l9vhw"] Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.482235 4975 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8xrbd"] Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.490767 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8c46q"] Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.505284 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:24 crc kubenswrapper[4975]: E0126 00:09:24.505947 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:25.005914185 +0000 UTC m=+149.127119679 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.515968 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29489760-6nbzp"] Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.519937 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-65wb9" podStartSLOduration=129.519925208 podStartE2EDuration="2m9.519925208s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:24.519374266 +0000 UTC m=+148.640579780" watchObservedRunningTime="2026-01-26 00:09:24.519925208 +0000 UTC m=+148.641130702" Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.533344 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-xnxsr" event={"ID":"c08c3cbe-5ec9-4aaa-a74a-bccfb6a427fa","Type":"ContainerStarted","Data":"67e6ba3f5debfd456d6a5b42e737e4d6a219983a9bbae6e3cdb689fd40950a5c"} Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.553179 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zdfmk" Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.557776 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-d9jj5" podStartSLOduration=129.557758 podStartE2EDuration="2m9.557758s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:24.55342901 +0000 UTC m=+148.674634504" watchObservedRunningTime="2026-01-26 00:09:24.557758 +0000 UTC m=+148.678963494" Jan 26 00:09:24 crc 
kubenswrapper[4975]: I0126 00:09:24.564063 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-g7tn2" Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.586844 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-8mddw"] Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.610818 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pdn2l"] Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.613933 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:24 crc kubenswrapper[4975]: E0126 00:09:24.618121 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:25.11809369 +0000 UTC m=+149.239299184 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.682272 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cctnf"] Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.715141 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:24 crc kubenswrapper[4975]: E0126 00:09:24.715533 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:25.215517855 +0000 UTC m=+149.336723339 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.815698 4975 patch_prober.go:28] interesting pod/router-default-5444994796-6hw84 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 00:09:24 crc kubenswrapper[4975]: [-]has-synced failed: reason withheld Jan 26 00:09:24 crc kubenswrapper[4975]: [+]process-running ok Jan 26 00:09:24 crc kubenswrapper[4975]: healthz check failed Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.815779 4975 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6hw84" podUID="ab6d30fc-43fe-46af-8d7c-e67f3b3811d2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.816452 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:24 crc kubenswrapper[4975]: E0126 00:09:24.816877 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:25.316849199 +0000 UTC m=+149.438054703 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.918477 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:24 crc kubenswrapper[4975]: E0126 00:09:24.918802 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:25.418753087 +0000 UTC m=+149.539958581 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:24 crc kubenswrapper[4975]: I0126 00:09:24.918971 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:24 crc kubenswrapper[4975]: E0126 00:09:24.919335 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:25.41931776 +0000 UTC m=+149.540523254 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.020178 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:25 crc kubenswrapper[4975]: E0126 00:09:25.020641 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:25.520612784 +0000 UTC m=+149.641818278 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.124038 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:25 crc kubenswrapper[4975]: E0126 00:09:25.124383 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:25.624369355 +0000 UTC m=+149.745574849 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.225993 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:25 crc kubenswrapper[4975]: E0126 00:09:25.226450 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:25.726428836 +0000 UTC m=+149.847634330 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.327584 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:25 crc kubenswrapper[4975]: E0126 00:09:25.331781 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:25.831758883 +0000 UTC m=+149.952964377 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.428780 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:25 crc kubenswrapper[4975]: E0126 00:09:25.429051 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:25.929032714 +0000 UTC m=+150.050238208 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.534104 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:25 crc kubenswrapper[4975]: E0126 00:09:25.534828 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:26.034813911 +0000 UTC m=+150.156019405 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.572661 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pdn2l" event={"ID":"3c669669-d4d5-42bf-969b-02661b7cf7a2","Type":"ContainerStarted","Data":"76b893d46eb75129f98eddbdc5bb163a636e84bcab479aa71b6f5d03683ae04c"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.581637 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-8mddw" event={"ID":"2ead6744-6221-4da7-b612-c1e59bab87e5","Type":"ContainerStarted","Data":"2be5dbb6d76fda3d047d69945ac1d5ac91196b1572dee9ad234058be5a4fda39"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.584034 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gqttk" event={"ID":"3d936082-c3b2-4117-9952-638b630b653a","Type":"ContainerStarted","Data":"d420c0d3be65e81b1fcbf9519fdd9dcdc08751adf0ae008d68733edfb4ecfa72"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.584062 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gqttk" event={"ID":"3d936082-c3b2-4117-9952-638b630b653a","Type":"ContainerStarted","Data":"738ebb8485b76d6559bf6b21e26be2ee0d5f94e1d1a9104d66fc5888d99fc1b2"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.597507 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fpdjm" event={"ID":"3e916258-6c75-413f-9d82-2ef568aa3647","Type":"ContainerStarted","Data":"8bea7fcf38b585467ca2ffaad6aaf7bf9b4dd3b93ba0b3bedf8f6c92525f249f"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.597606 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fpdjm" event={"ID":"3e916258-6c75-413f-9d82-2ef568aa3647","Type":"ContainerStarted","Data":"b3bb83fbeea20687c3c500f923cdb1f2d051ab6fbab69ed11c849408de2963ca"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.617375 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8dkw2" event={"ID":"c4e5fa00-3bad-48f8-930e-a81d2ffb696b","Type":"ContainerStarted","Data":"842cb8083dd4bff91cd15b1b3b0bd473bcaae26de8a80857dc4afcdf1db0a406"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.617437 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8dkw2" event={"ID":"c4e5fa00-3bad-48f8-930e-a81d2ffb696b","Type":"ContainerStarted","Data":"16d9613a08341913e8dd817071b37d595558c033c7a3994a2523566a804828dd"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.619402 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" event={"ID":"bb028166-02f4-416d-a53e-1e96d3ad062c","Type":"ContainerStarted","Data":"d5068aa32b078916dfa9c613694f9a7141f18c6c00fb85c3e0db1a3f8bebbec8"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.635676 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:25 crc kubenswrapper[4975]: E0126 00:09:25.636128 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:26.136089104 +0000 UTC m=+150.257294628 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.639329 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-vfc7w" event={"ID":"cc3f2a80-2278-4d49-9bfc-6a5cb026394a","Type":"ContainerStarted","Data":"10bb7efb476300352911fd57dd9ffc102d42fac617ee7ae753f6828295465635"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.639370 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-vfc7w" event={"ID":"cc3f2a80-2278-4d49-9bfc-6a5cb026394a","Type":"ContainerStarted","Data":"363085efbf37935bba192be6acb50ddd8567cc69f1759d530058809806e840d1"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.646796 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-tb9s5" event={"ID":"cfe2bc53-bda8-4d4e-8da5-521f3821826c","Type":"ContainerStarted","Data":"c69d645fd0682e9d06c7f188a03e5a2102b9f688fcc1e76f35cedbb9b28c5d3e"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.646831 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-tb9s5" event={"ID":"cfe2bc53-bda8-4d4e-8da5-521f3821826c","Type":"ContainerStarted","Data":"0f2bb39f5636b4d52d83f56802253fc78ac80c2d4e73d7705935625ae5248ba3"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.665632 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"3e21b738b3f27e793366d0cd85760cce5054e2931eca98f895a71c9e09620b92"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.687672 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"07a86bf56194e125922c8f42c7fb91dbbd1bb5bc9a9ec4df944bd5e72ea3ea52"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.695897 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-vfc7w" podStartSLOduration=129.695872292 podStartE2EDuration="2m9.695872292s" podCreationTimestamp="2026-01-26 00:07:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:25.663525736 +0000 UTC m=+149.784731230" watchObservedRunningTime="2026-01-26 00:09:25.695872292 +0000 UTC m=+149.817077786" Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.698409 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-tb9s5" podStartSLOduration=129.69839843 podStartE2EDuration="2m9.69839843s" podCreationTimestamp="2026-01-26 00:07:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:25.694847458 +0000 UTC 
m=+149.816052952" watchObservedRunningTime="2026-01-26 00:09:25.69839843 +0000 UTC m=+149.819603924" Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.702250 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29489760-6nbzp" event={"ID":"6f5a24d3-8dae-4970-8842-21bddef2373f","Type":"ContainerStarted","Data":"fbd34d9712eb7ca50168f653a326a9a61d24a354f53ae74bc53b4a7c27e37574"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.702298 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29489760-6nbzp" event={"ID":"6f5a24d3-8dae-4970-8842-21bddef2373f","Type":"ContainerStarted","Data":"fc3aa662113ce55881831eb7077dbec910f4e66ad1cf89f36ade2ebecf6d82d8"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.726278 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-d8v59" event={"ID":"6a72f0b8-41a1-46ab-8ad4-d0448b255e6d","Type":"ContainerStarted","Data":"aff5bf9d9bfc869aa9d6b56d659a521e0966de05e688acbabed18b48d07e6106"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.731266 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"a524adee99beb53f3eb4e2b82dee89d6715d0f9b338187857aeb2f6a7f2bfbf6"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.736878 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.737899 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-l9vhw" event={"ID":"1ed800e9-7ccf-41d7-802e-3fb96798ed9d","Type":"ContainerStarted","Data":"d2fff71075058a7ad835f6cfb11a86cfc2e1f752f3994b06327c7bcdcc29e9d1"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.737945 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-l9vhw" event={"ID":"1ed800e9-7ccf-41d7-802e-3fb96798ed9d","Type":"ContainerStarted","Data":"4ab42745e2234e4bf938f2022d2e3b00e83553d80c6c191cb2633e29a0e41802"} Jan 26 00:09:25 crc kubenswrapper[4975]: E0126 00:09:25.738670 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:26.238654738 +0000 UTC m=+150.359860232 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.799079 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-j6df8" event={"ID":"fcabeb27-81a2-4c9c-9f7f-f6883e3f4724","Type":"ContainerStarted","Data":"9e154a3d10f0465bba5491f78ba08eebe2bc5f684ffb247c0db6a7a3aee4ce13"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.806892 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8c46q" event={"ID":"72455538-0f4e-4b90-a4e8-3c990f4b89df","Type":"ContainerStarted","Data":"2c9502e0ac41c8f36ad2482723f270db40a62d7eaaafa61f8dd682fa11e1e394"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.810494 4975 patch_prober.go:28] interesting pod/router-default-5444994796-6hw84 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 00:09:25 crc kubenswrapper[4975]: [-]has-synced failed: reason withheld Jan 26 00:09:25 crc kubenswrapper[4975]: [+]process-running ok Jan 26 00:09:25 crc kubenswrapper[4975]: healthz check failed Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.810538 4975 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6hw84" podUID="ab6d30fc-43fe-46af-8d7c-e67f3b3811d2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.812154 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" event={"ID":"fcf19955-9a00-4a50-8ce1-bd7098c45eec","Type":"ContainerStarted","Data":"acfcb02a48dc4fad3f9254b4b0163b3a9005fded7aa35e8771ad167ae351b945"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.812185 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" event={"ID":"fcf19955-9a00-4a50-8ce1-bd7098c45eec","Type":"ContainerStarted","Data":"1a8578ee6d37e1a4aa9fc7e446a0cf36d54c302ea777b2f258d01df9148992c2"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.813135 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.817712 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjr5q" event={"ID":"eeb5c859-42f1-4c67-a8a2-18635447eb4d","Type":"ContainerStarted","Data":"f29acf1124d88045a7644c62e013044d8ef99fb06cac596b85e9edcc96a5ae6d"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.817942 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjr5q" event={"ID":"eeb5c859-42f1-4c67-a8a2-18635447eb4d","Type":"ContainerStarted","Data":"e9b9ad3ece9d124625d98f11c1a8d6701c6c893a035f10285d4205683c0f2f34"} Jan 26 00:09:25 crc 
kubenswrapper[4975]: I0126 00:09:25.828607 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cctnf" event={"ID":"e553fc00-cc5f-41e7-aeae-99be7ec861d4","Type":"ContainerStarted","Data":"050869d20860d3371597ee6b30078cf7b0cfe8c8cfbdbf3ebde5d558cc08861b"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.828304 4975 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-8xrbd container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.828712 4975 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" podUID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.841563 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:25 crc kubenswrapper[4975]: E0126 00:09:25.841662 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:26.34164244 +0000 UTC m=+150.462847924 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.842103 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:25 crc kubenswrapper[4975]: E0126 00:09:25.859137 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:26.359113093 +0000 UTC m=+150.480318587 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.861570 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29489760-6nbzp" podStartSLOduration=130.861550929 podStartE2EDuration="2m10.861550929s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:25.774756679 +0000 UTC m=+149.895962183" watchObservedRunningTime="2026-01-26 00:09:25.861550929 +0000 UTC m=+149.982756423" Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.862605 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-j6df8" podStartSLOduration=7.862598053 podStartE2EDuration="7.862598053s" podCreationTimestamp="2026-01-26 00:09:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:25.860558676 +0000 UTC m=+149.981764170" watchObservedRunningTime="2026-01-26 00:09:25.862598053 +0000 UTC m=+149.983803547" Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.867359 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ts2pk" event={"ID":"2b324e48-dc6f-47db-acb5-9b590fe869cb","Type":"ContainerStarted","Data":"e468daffa22c267756178816e7d29b8f4d9ab1117b79812c6d0f58eeea05da3a"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.867440 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ts2pk" event={"ID":"2b324e48-dc6f-47db-acb5-9b590fe869cb","Type":"ContainerStarted","Data":"3fdffdc1c9b8ae879d061980f0b6a62c1584d3c6f5f6587ccbec88a0acc5350f"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.868743 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ts2pk" Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.877605 4975 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-ts2pk container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:5443/healthz\": dial tcp 10.217.0.35:5443: connect: connection refused" start-of-body= Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.877718 4975 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ts2pk" podUID="2b324e48-dc6f-47db-acb5-9b590fe869cb" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.35:5443/healthz\": dial tcp 10.217.0.35:5443: connect: connection refused" Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.917986 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cctnf" podStartSLOduration=130.917951478 
podStartE2EDuration="2m10.917951478s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:25.905797018 +0000 UTC m=+150.027002502" watchObservedRunningTime="2026-01-26 00:09:25.917951478 +0000 UTC m=+150.039156972" Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.943287 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:25 crc kubenswrapper[4975]: E0126 00:09:25.943635 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:26.443614209 +0000 UTC m=+150.564819703 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.950358 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-xnxsr" event={"ID":"c08c3cbe-5ec9-4aaa-a74a-bccfb6a427fa","Type":"ContainerStarted","Data":"cbb2a25cef0f352ce49b15d604be4c1dba6a58259817dd00a70c468f9d286164"} Jan 26 00:09:25 crc kubenswrapper[4975]: I0126 00:09:25.991959 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" podStartSLOduration=130.991933982 podStartE2EDuration="2m10.991933982s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:25.985059554 +0000 UTC m=+150.106265048" watchObservedRunningTime="2026-01-26 00:09:25.991933982 +0000 UTC m=+150.113139476" Jan 26 00:09:26 crc kubenswrapper[4975]: I0126 00:09:26.037993 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-xnxsr" podStartSLOduration=131.037975023 podStartE2EDuration="2m11.037975023s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:26.03525132 +0000 UTC m=+150.156456814" watchObservedRunningTime="2026-01-26 00:09:26.037975023 +0000 UTC m=+150.159180517" Jan 26 00:09:26 crc kubenswrapper[4975]: I0126 00:09:26.051570 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:26 crc kubenswrapper[4975]: E0126 00:09:26.052414 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:26.552384375 +0000 UTC m=+150.673589869 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:26 crc kubenswrapper[4975]: I0126 00:09:26.085938 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ts2pk" podStartSLOduration=130.085867096 podStartE2EDuration="2m10.085867096s" podCreationTimestamp="2026-01-26 00:07:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:26.085863816 +0000 UTC m=+150.207069300" watchObservedRunningTime="2026-01-26 00:09:26.085867096 +0000 UTC m=+150.207072590" Jan 26 00:09:26 crc kubenswrapper[4975]: I0126 00:09:26.151998 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:26 crc kubenswrapper[4975]: E0126 00:09:26.153209 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:26.653189167 +0000 UTC m=+150.774394661 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:26 crc kubenswrapper[4975]: I0126 00:09:26.153948 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:26 crc kubenswrapper[4975]: E0126 00:09:26.154273 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:26.654252762 +0000 UTC m=+150.775458246 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:26 crc kubenswrapper[4975]: I0126 00:09:26.256122 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:26 crc kubenswrapper[4975]: E0126 00:09:26.256833 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:26.756806295 +0000 UTC m=+150.878011789 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:26 crc kubenswrapper[4975]: I0126 00:09:26.357699 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:26 crc kubenswrapper[4975]: E0126 00:09:26.358224 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:26.858213491 +0000 UTC m=+150.979418985 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:26 crc kubenswrapper[4975]: I0126 00:09:26.461216 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:26 crc kubenswrapper[4975]: E0126 00:09:26.462037 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:26.962016503 +0000 UTC m=+151.083221997 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:26 crc kubenswrapper[4975]: I0126 00:09:26.462272 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:26 crc kubenswrapper[4975]: E0126 00:09:26.462950 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:26.962938224 +0000 UTC m=+151.084143718 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:26 crc kubenswrapper[4975]: I0126 00:09:26.563797 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:26 crc kubenswrapper[4975]: E0126 00:09:26.564347 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:27.06432933 +0000 UTC m=+151.185534824 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:26 crc kubenswrapper[4975]: I0126 00:09:26.667251 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:26 crc kubenswrapper[4975]: E0126 00:09:26.667599 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:27.167588259 +0000 UTC m=+151.288793753 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:26 crc kubenswrapper[4975]: I0126 00:09:26.776534 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:26 crc kubenswrapper[4975]: E0126 00:09:26.780140 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:27.280092501 +0000 UTC m=+151.401298005 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:26 crc kubenswrapper[4975]: I0126 00:09:26.829933 4975 patch_prober.go:28] interesting pod/router-default-5444994796-6hw84 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 00:09:26 crc kubenswrapper[4975]: [-]has-synced failed: reason withheld Jan 26 00:09:26 crc kubenswrapper[4975]: [+]process-running ok Jan 26 00:09:26 crc kubenswrapper[4975]: healthz check failed Jan 26 00:09:26 crc kubenswrapper[4975]: I0126 00:09:26.829995 4975 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6hw84" podUID="ab6d30fc-43fe-46af-8d7c-e67f3b3811d2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 00:09:26 crc kubenswrapper[4975]: I0126 00:09:26.879666 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:26 crc kubenswrapper[4975]: E0126 00:09:26.880027 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:27.380012153 +0000 UTC m=+151.501217647 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:26 crc kubenswrapper[4975]: I0126 00:09:26.984716 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:26 crc kubenswrapper[4975]: E0126 00:09:26.984824 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:27.484804487 +0000 UTC m=+151.606009981 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:26 crc kubenswrapper[4975]: I0126 00:09:26.985021 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:26 crc kubenswrapper[4975]: E0126 00:09:26.985276 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:27.485269458 +0000 UTC m=+151.606474952 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:26 crc kubenswrapper[4975]: I0126 00:09:26.990689 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-cctnf" event={"ID":"e553fc00-cc5f-41e7-aeae-99be7ec861d4","Type":"ContainerStarted","Data":"d7bc99154c406eb647e4b52fb69c660a6b825760c7e76e3ac7d7356af4ff6e21"} Jan 26 00:09:26 crc kubenswrapper[4975]: I0126 00:09:26.994912 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"226cf0940a96aba7ecd5177adcd7f162c82afb50fc6bfabc05613d5da2b66bca"} Jan 26 00:09:26 crc kubenswrapper[4975]: I0126 00:09:26.999901 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" event={"ID":"bb028166-02f4-416d-a53e-1e96d3ad062c","Type":"ContainerStarted","Data":"7d083d33f121736602d99dc6fec60b623fbc86c31d45aa3749a1d9edb80ae23c"} Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.009125 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pdn2l" event={"ID":"3c669669-d4d5-42bf-969b-02661b7cf7a2","Type":"ContainerStarted","Data":"ea47d52f6230c5a7c7f13915e9dde6e8ce08fe41aa0aeb4febc485eac0844d18"} Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.009366 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pdn2l" Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.011561 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gqttk" event={"ID":"3d936082-c3b2-4117-9952-638b630b653a","Type":"ContainerStarted","Data":"759b73aa12870cc4c873f72d842ecaf5ad91c28c7244716013a2c643c470101c"} Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.013151 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-d8v59" event={"ID":"6a72f0b8-41a1-46ab-8ad4-d0448b255e6d","Type":"ContainerStarted","Data":"0de7e46fffd3b84c57eb574653058798bf50fe7dbdb69c342ee3a1e787f52ab1"} Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.018862 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8dkw2" event={"ID":"c4e5fa00-3bad-48f8-930e-a81d2ffb696b","Type":"ContainerStarted","Data":"ab541090f82ac07b1257ae5bfa3eb07bd32fd9c36f299efae46514fb58640c1f"} Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.019697 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8c46q" event={"ID":"72455538-0f4e-4b90-a4e8-3c990f4b89df","Type":"ContainerStarted","Data":"51a70965b806764993a0e824cfca167b7a9b6f6b078109040ce1659f177cafe5"} Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.024384 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fpdjm" event={"ID":"3e916258-6c75-413f-9d82-2ef568aa3647","Type":"ContainerStarted","Data":"73146f2971cb29e657835e4f38a86cc8a7caad4a52215346d9676e45238a632f"} Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.038557 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-gqttk" podStartSLOduration=132.038542345 podStartE2EDuration="2m12.038542345s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:27.036226082 +0000 UTC m=+151.157431576" watchObservedRunningTime="2026-01-26 00:09:27.038542345 +0000 UTC m=+151.159747829" Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.040980 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjr5q" event={"ID":"eeb5c859-42f1-4c67-a8a2-18635447eb4d","Type":"ContainerStarted","Data":"ed415e8ad8095d2d0b9f849c3a1931ed8504a92aecc1cf7b1e5ffc9d01d25230"} Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.048447 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pdn2l" Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.056938 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"809b9e1ac868e6a6ec505e50ff4d62affe8d99a30666e7f58a89a17bdc885ac0"} Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.058823 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"c417d4ad0b50f3da0cf7b463d54c65a8aadb2bea9095f4f9d87b99ffae9e265d"} Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.059259 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.060426 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-l9vhw" event={"ID":"1ed800e9-7ccf-41d7-802e-3fb96798ed9d","Type":"ContainerStarted","Data":"29b0659a568cc528d9de6a9e78fabb0e6949d5e3683c44555cded72212571150"} Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.060801 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-l9vhw" Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.063981 4975 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-8xrbd container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.064038 4975 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" podUID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 
10.217.0.39:8080: connect: connection refused" Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.064949 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-8mddw" event={"ID":"2ead6744-6221-4da7-b612-c1e59bab87e5","Type":"ContainerStarted","Data":"2060c4e41af14eba38bc5348042d32c6afdce9c99c9fd2224f62dc0d7cd73878"} Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.064973 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-8mddw" event={"ID":"2ead6744-6221-4da7-b612-c1e59bab87e5","Type":"ContainerStarted","Data":"46a8ca212e38a2e98b88402e19c67b160d7ba2e6db89751b51cc3cb09fc28b0d"} Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.089266 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:27 crc kubenswrapper[4975]: E0126 00:09:27.089496 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:27.589474139 +0000 UTC m=+151.710679623 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.089659 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:27 crc kubenswrapper[4975]: E0126 00:09:27.089992 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:27.589979801 +0000 UTC m=+151.711185295 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.093051 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-ts2pk" Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.095263 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-d8v59" podStartSLOduration=132.095251292 podStartE2EDuration="2m12.095251292s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:27.073956111 +0000 UTC m=+151.195161605" watchObservedRunningTime="2026-01-26 00:09:27.095251292 +0000 UTC m=+151.216456776" Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.095682 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-pdn2l" podStartSLOduration=132.095678732 podStartE2EDuration="2m12.095678732s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:27.094395642 +0000 UTC m=+151.215601146" watchObservedRunningTime="2026-01-26 00:09:27.095678732 +0000 UTC m=+151.216884226" Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.139222 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-8c46q" podStartSLOduration=132.139199225 podStartE2EDuration="2m12.139199225s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:27.125676583 +0000 UTC m=+151.246882077" watchObservedRunningTime="2026-01-26 00:09:27.139199225 +0000 UTC m=+151.260404719" Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.185001 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zjr5q" podStartSLOduration=132.184983299 podStartE2EDuration="2m12.184983299s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:27.152118902 +0000 UTC m=+151.273324396" watchObservedRunningTime="2026-01-26 00:09:27.184983299 +0000 UTC m=+151.306188783" Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.191467 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:27 crc kubenswrapper[4975]: E0126 
00:09:27.193535 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:27.693519516 +0000 UTC m=+151.814725010 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.252926 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-fpdjm" podStartSLOduration=132.252908224 podStartE2EDuration="2m12.252908224s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:27.251245666 +0000 UTC m=+151.372451160" watchObservedRunningTime="2026-01-26 00:09:27.252908224 +0000 UTC m=+151.374113718" Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.308582 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:27 crc kubenswrapper[4975]: E0126 00:09:27.310698 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:27.810684035 +0000 UTC m=+151.931889529 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.341591 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-8mddw" podStartSLOduration=132.341571207 podStartE2EDuration="2m12.341571207s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:27.31130115 +0000 UTC m=+151.432506644" watchObservedRunningTime="2026-01-26 00:09:27.341571207 +0000 UTC m=+151.462776711" Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.402247 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-8dkw2" podStartSLOduration=132.402219385 podStartE2EDuration="2m12.402219385s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:27.343974382 +0000 UTC m=+151.465179876" watchObservedRunningTime="2026-01-26 00:09:27.402219385 +0000 UTC m=+151.523424879" Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.413201 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:27 crc kubenswrapper[4975]: E0126 00:09:27.413419 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:27.913382062 +0000 UTC m=+152.034587556 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.413547 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:27 crc kubenswrapper[4975]: E0126 00:09:27.413977 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:27.913969165 +0000 UTC m=+152.035174659 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.515035 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:27 crc kubenswrapper[4975]: E0126 00:09:27.515263 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:28.015240169 +0000 UTC m=+152.136445663 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.515330 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:27 crc kubenswrapper[4975]: E0126 00:09:27.515634 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:28.015626868 +0000 UTC m=+152.136832362 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.616087 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:27 crc kubenswrapper[4975]: E0126 00:09:27.616294 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:28.116263016 +0000 UTC m=+152.237468510 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.616508 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:27 crc kubenswrapper[4975]: E0126 00:09:27.616967 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:28.116941432 +0000 UTC m=+152.238146926 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.717394 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:27 crc kubenswrapper[4975]: E0126 00:09:27.717621 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:28.217599011 +0000 UTC m=+152.338804505 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.810154 4975 patch_prober.go:28] interesting pod/router-default-5444994796-6hw84 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 00:09:27 crc kubenswrapper[4975]: [-]has-synced failed: reason withheld Jan 26 00:09:27 crc kubenswrapper[4975]: [+]process-running ok Jan 26 00:09:27 crc kubenswrapper[4975]: healthz check failed Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.810221 4975 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6hw84" podUID="ab6d30fc-43fe-46af-8d7c-e67f3b3811d2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.818685 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:27 crc kubenswrapper[4975]: E0126 00:09:27.819322 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:28.319300894 +0000 UTC m=+152.440506388 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:27 crc kubenswrapper[4975]: I0126 00:09:27.920085 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:27 crc kubenswrapper[4975]: E0126 00:09:27.920330 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:28.420314351 +0000 UTC m=+152.541519845 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.021573 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:28 crc kubenswrapper[4975]: E0126 00:09:28.022190 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:28.522163558 +0000 UTC m=+152.643369052 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.069392 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" event={"ID":"bb028166-02f4-416d-a53e-1e96d3ad062c","Type":"ContainerStarted","Data":"a4fad0b50272b179bf020ef33e8fcd2d548a5c8b66ac67f4f78e433017abdce5"} Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.070041 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" event={"ID":"bb028166-02f4-416d-a53e-1e96d3ad062c","Type":"ContainerStarted","Data":"0acdee717ae2546ef8c8fb293e37b50c9867850d158e0671e0ba5e3425da31df"} Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.106127 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.110563 4975 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.124170 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:28 crc kubenswrapper[4975]: E0126 00:09:28.124314 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-26 00:09:28.624289041 +0000 UTC m=+152.745494535 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.127299 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:28 crc kubenswrapper[4975]: E0126 00:09:28.127619 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:28.627607577 +0000 UTC m=+152.748813071 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.130324 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-l9vhw" podStartSLOduration=132.130308069 podStartE2EDuration="2m12.130308069s" podCreationTimestamp="2026-01-26 00:07:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:27.430362943 +0000 UTC m=+151.551568437" watchObservedRunningTime="2026-01-26 00:09:28.130308069 +0000 UTC m=+152.251513563" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.228337 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:28 crc kubenswrapper[4975]: E0126 00:09:28.228631 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:28.728605994 +0000 UTC m=+152.849811478 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.329279 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:28 crc kubenswrapper[4975]: E0126 00:09:28.329786 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:28.829765315 +0000 UTC m=+152.950970809 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.430050 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:28 crc kubenswrapper[4975]: E0126 00:09:28.430288 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:28.93025092 +0000 UTC m=+153.051456414 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.440000 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.440067 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.442011 4975 patch_prober.go:28] interesting pod/console-f9d7485db-wrh4k container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.17:8443/health\": dial tcp 10.217.0.17:8443: connect: connection refused" start-of-body= Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.442101 4975 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-wrh4k" podUID="fe206c78-fea9-4b0e-b236-3e4bd73f1d13" containerName="console" probeResult="failure" output="Get \"https://10.217.0.17:8443/health\": dial tcp 10.217.0.17:8443: connect: connection refused" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.501539 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-lx8th"] Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.503085 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lx8th" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.507871 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.518392 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lx8th"] Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.530844 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.530913 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/75c99291-b46b-4f76-9922-cd530cca51c9-utilities\") pod \"community-operators-lx8th\" (UID: \"75c99291-b46b-4f76-9922-cd530cca51c9\") " pod="openshift-marketplace/community-operators-lx8th" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.530956 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/75c99291-b46b-4f76-9922-cd530cca51c9-catalog-content\") pod \"community-operators-lx8th\" (UID: \"75c99291-b46b-4f76-9922-cd530cca51c9\") " pod="openshift-marketplace/community-operators-lx8th" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.531034 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dsn6v\" (UniqueName: \"kubernetes.io/projected/75c99291-b46b-4f76-9922-cd530cca51c9-kube-api-access-dsn6v\") pod \"community-operators-lx8th\" (UID: \"75c99291-b46b-4f76-9922-cd530cca51c9\") " pod="openshift-marketplace/community-operators-lx8th" Jan 26 00:09:28 crc kubenswrapper[4975]: E0126 00:09:28.531987 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-26 00:09:29.031963294 +0000 UTC m=+153.153168788 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-vd6qz" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.631842 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.632004 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/75c99291-b46b-4f76-9922-cd530cca51c9-utilities\") pod \"community-operators-lx8th\" (UID: \"75c99291-b46b-4f76-9922-cd530cca51c9\") " pod="openshift-marketplace/community-operators-lx8th" Jan 26 00:09:28 crc kubenswrapper[4975]: E0126 00:09:28.632115 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-26 00:09:29.13207581 +0000 UTC m=+153.253281304 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.632282 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/75c99291-b46b-4f76-9922-cd530cca51c9-catalog-content\") pod \"community-operators-lx8th\" (UID: \"75c99291-b46b-4f76-9922-cd530cca51c9\") " pod="openshift-marketplace/community-operators-lx8th" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.632465 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dsn6v\" (UniqueName: \"kubernetes.io/projected/75c99291-b46b-4f76-9922-cd530cca51c9-kube-api-access-dsn6v\") pod \"community-operators-lx8th\" (UID: \"75c99291-b46b-4f76-9922-cd530cca51c9\") " pod="openshift-marketplace/community-operators-lx8th" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.633154 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/75c99291-b46b-4f76-9922-cd530cca51c9-utilities\") pod \"community-operators-lx8th\" (UID: \"75c99291-b46b-4f76-9922-cd530cca51c9\") " pod="openshift-marketplace/community-operators-lx8th" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.633433 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/75c99291-b46b-4f76-9922-cd530cca51c9-catalog-content\") pod \"community-operators-lx8th\" 
(UID: \"75c99291-b46b-4f76-9922-cd530cca51c9\") " pod="openshift-marketplace/community-operators-lx8th" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.651671 4975 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-01-26T00:09:28.110587095Z","Handler":null,"Name":""} Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.660449 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dsn6v\" (UniqueName: \"kubernetes.io/projected/75c99291-b46b-4f76-9922-cd530cca51c9-kube-api-access-dsn6v\") pod \"community-operators-lx8th\" (UID: \"75c99291-b46b-4f76-9922-cd530cca51c9\") " pod="openshift-marketplace/community-operators-lx8th" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.667422 4975 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.667483 4975 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.708685 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fccwg"] Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.709901 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fccwg" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.711757 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.719717 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fccwg"] Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.733207 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27ed45eb-a90d-4bd5-8a17-8988f53407aa-utilities\") pod \"certified-operators-fccwg\" (UID: \"27ed45eb-a90d-4bd5-8a17-8988f53407aa\") " pod="openshift-marketplace/certified-operators-fccwg" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.733262 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.733284 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fsqr\" (UniqueName: \"kubernetes.io/projected/27ed45eb-a90d-4bd5-8a17-8988f53407aa-kube-api-access-7fsqr\") pod \"certified-operators-fccwg\" (UID: \"27ed45eb-a90d-4bd5-8a17-8988f53407aa\") " pod="openshift-marketplace/certified-operators-fccwg" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.733300 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/27ed45eb-a90d-4bd5-8a17-8988f53407aa-catalog-content\") pod \"certified-operators-fccwg\" (UID: \"27ed45eb-a90d-4bd5-8a17-8988f53407aa\") " pod="openshift-marketplace/certified-operators-fccwg" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.736511 4975 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.736562 4975 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.774652 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-vd6qz\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.805227 4975 patch_prober.go:28] interesting pod/router-default-5444994796-6hw84 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 00:09:28 crc kubenswrapper[4975]: [-]has-synced failed: reason withheld Jan 26 00:09:28 crc kubenswrapper[4975]: [+]process-running ok Jan 26 00:09:28 crc kubenswrapper[4975]: healthz check failed Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.805276 4975 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6hw84" podUID="ab6d30fc-43fe-46af-8d7c-e67f3b3811d2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.816621 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lx8th" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.834338 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.834512 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fsqr\" (UniqueName: \"kubernetes.io/projected/27ed45eb-a90d-4bd5-8a17-8988f53407aa-kube-api-access-7fsqr\") pod \"certified-operators-fccwg\" (UID: \"27ed45eb-a90d-4bd5-8a17-8988f53407aa\") " pod="openshift-marketplace/certified-operators-fccwg" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.834538 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27ed45eb-a90d-4bd5-8a17-8988f53407aa-catalog-content\") pod \"certified-operators-fccwg\" (UID: \"27ed45eb-a90d-4bd5-8a17-8988f53407aa\") " pod="openshift-marketplace/certified-operators-fccwg" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.834623 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27ed45eb-a90d-4bd5-8a17-8988f53407aa-utilities\") pod \"certified-operators-fccwg\" (UID: \"27ed45eb-a90d-4bd5-8a17-8988f53407aa\") " pod="openshift-marketplace/certified-operators-fccwg" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.835171 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27ed45eb-a90d-4bd5-8a17-8988f53407aa-catalog-content\") pod \"certified-operators-fccwg\" (UID: \"27ed45eb-a90d-4bd5-8a17-8988f53407aa\") " pod="openshift-marketplace/certified-operators-fccwg" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.835232 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27ed45eb-a90d-4bd5-8a17-8988f53407aa-utilities\") pod \"certified-operators-fccwg\" (UID: \"27ed45eb-a90d-4bd5-8a17-8988f53407aa\") " pod="openshift-marketplace/certified-operators-fccwg" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.842692 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.856446 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fsqr\" (UniqueName: \"kubernetes.io/projected/27ed45eb-a90d-4bd5-8a17-8988f53407aa-kube-api-access-7fsqr\") pod \"certified-operators-fccwg\" (UID: \"27ed45eb-a90d-4bd5-8a17-8988f53407aa\") " pod="openshift-marketplace/certified-operators-fccwg" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.896781 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4b4kw"] Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.897860 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4b4kw" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.908965 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.913362 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4b4kw"] Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.915442 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:28 crc kubenswrapper[4975]: I0126 00:09:28.919142 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-xmsj9" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.033092 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fccwg" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.038552 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af99b29a-8baa-405f-a1f1-84116ba167a8-catalog-content\") pod \"community-operators-4b4kw\" (UID: \"af99b29a-8baa-405f-a1f1-84116ba167a8\") " pod="openshift-marketplace/community-operators-4b4kw" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.039171 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af99b29a-8baa-405f-a1f1-84116ba167a8-utilities\") pod \"community-operators-4b4kw\" (UID: \"af99b29a-8baa-405f-a1f1-84116ba167a8\") " pod="openshift-marketplace/community-operators-4b4kw" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.039195 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbtn4\" (UniqueName: \"kubernetes.io/projected/af99b29a-8baa-405f-a1f1-84116ba167a8-kube-api-access-cbtn4\") pod \"community-operators-4b4kw\" (UID: \"af99b29a-8baa-405f-a1f1-84116ba167a8\") " pod="openshift-marketplace/community-operators-4b4kw" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.088948 4975 patch_prober.go:28] interesting pod/downloads-7954f5f757-cbkrr container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.089010 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-cbkrr" podUID="c1bf1b13-966b-4a74-935c-47af817d7777" 
containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.088961 4975 patch_prober.go:28] interesting pod/downloads-7954f5f757-cbkrr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.089289 4975 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-cbkrr" podUID="c1bf1b13-966b-4a74-935c-47af817d7777" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.107626 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mmf4j"] Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.113534 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mmf4j" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.120466 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mmf4j"] Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.124321 4975 generic.go:334] "Generic (PLEG): container finished" podID="6f5a24d3-8dae-4970-8842-21bddef2373f" containerID="fbd34d9712eb7ca50168f653a326a9a61d24a354f53ae74bc53b4a7c27e37574" exitCode=0 Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.124383 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29489760-6nbzp" event={"ID":"6f5a24d3-8dae-4970-8842-21bddef2373f","Type":"ContainerDied","Data":"fbd34d9712eb7ca50168f653a326a9a61d24a354f53ae74bc53b4a7c27e37574"} Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.140400 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af99b29a-8baa-405f-a1f1-84116ba167a8-utilities\") pod \"community-operators-4b4kw\" (UID: \"af99b29a-8baa-405f-a1f1-84116ba167a8\") " pod="openshift-marketplace/community-operators-4b4kw" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.140458 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbtn4\" (UniqueName: \"kubernetes.io/projected/af99b29a-8baa-405f-a1f1-84116ba167a8-kube-api-access-cbtn4\") pod \"community-operators-4b4kw\" (UID: \"af99b29a-8baa-405f-a1f1-84116ba167a8\") " pod="openshift-marketplace/community-operators-4b4kw" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.140507 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af99b29a-8baa-405f-a1f1-84116ba167a8-catalog-content\") pod \"community-operators-4b4kw\" (UID: \"af99b29a-8baa-405f-a1f1-84116ba167a8\") " pod="openshift-marketplace/community-operators-4b4kw" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.141192 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af99b29a-8baa-405f-a1f1-84116ba167a8-catalog-content\") pod \"community-operators-4b4kw\" (UID: \"af99b29a-8baa-405f-a1f1-84116ba167a8\") " 
pod="openshift-marketplace/community-operators-4b4kw" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.141494 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af99b29a-8baa-405f-a1f1-84116ba167a8-utilities\") pod \"community-operators-4b4kw\" (UID: \"af99b29a-8baa-405f-a1f1-84116ba167a8\") " pod="openshift-marketplace/community-operators-4b4kw" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.164287 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" event={"ID":"bb028166-02f4-416d-a53e-1e96d3ad062c","Type":"ContainerStarted","Data":"d6fb0a01f3ce6b65010587520d3f853eaca2cdf4e402caa46718e9387add724a"} Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.167452 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbtn4\" (UniqueName: \"kubernetes.io/projected/af99b29a-8baa-405f-a1f1-84116ba167a8-kube-api-access-cbtn4\") pod \"community-operators-4b4kw\" (UID: \"af99b29a-8baa-405f-a1f1-84116ba167a8\") " pod="openshift-marketplace/community-operators-4b4kw" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.190338 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-dsbk6" podStartSLOduration=11.190314002 podStartE2EDuration="11.190314002s" podCreationTimestamp="2026-01-26 00:09:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:29.187158649 +0000 UTC m=+153.308364143" watchObservedRunningTime="2026-01-26 00:09:29.190314002 +0000 UTC m=+153.311519496" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.248770 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2aae40a-ded6-40de-a541-f22ef90f71e5-catalog-content\") pod \"certified-operators-mmf4j\" (UID: \"d2aae40a-ded6-40de-a541-f22ef90f71e5\") " pod="openshift-marketplace/certified-operators-mmf4j" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.248878 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s92qd\" (UniqueName: \"kubernetes.io/projected/d2aae40a-ded6-40de-a541-f22ef90f71e5-kube-api-access-s92qd\") pod \"certified-operators-mmf4j\" (UID: \"d2aae40a-ded6-40de-a541-f22ef90f71e5\") " pod="openshift-marketplace/certified-operators-mmf4j" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.249227 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2aae40a-ded6-40de-a541-f22ef90f71e5-utilities\") pod \"certified-operators-mmf4j\" (UID: \"d2aae40a-ded6-40de-a541-f22ef90f71e5\") " pod="openshift-marketplace/certified-operators-mmf4j" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.298374 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4b4kw" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.346347 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lx8th"] Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.349968 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2aae40a-ded6-40de-a541-f22ef90f71e5-catalog-content\") pod \"certified-operators-mmf4j\" (UID: \"d2aae40a-ded6-40de-a541-f22ef90f71e5\") " pod="openshift-marketplace/certified-operators-mmf4j" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.350031 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s92qd\" (UniqueName: \"kubernetes.io/projected/d2aae40a-ded6-40de-a541-f22ef90f71e5-kube-api-access-s92qd\") pod \"certified-operators-mmf4j\" (UID: \"d2aae40a-ded6-40de-a541-f22ef90f71e5\") " pod="openshift-marketplace/certified-operators-mmf4j" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.350135 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2aae40a-ded6-40de-a541-f22ef90f71e5-utilities\") pod \"certified-operators-mmf4j\" (UID: \"d2aae40a-ded6-40de-a541-f22ef90f71e5\") " pod="openshift-marketplace/certified-operators-mmf4j" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.353282 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2aae40a-ded6-40de-a541-f22ef90f71e5-catalog-content\") pod \"certified-operators-mmf4j\" (UID: \"d2aae40a-ded6-40de-a541-f22ef90f71e5\") " pod="openshift-marketplace/certified-operators-mmf4j" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.355160 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2aae40a-ded6-40de-a541-f22ef90f71e5-utilities\") pod \"certified-operators-mmf4j\" (UID: \"d2aae40a-ded6-40de-a541-f22ef90f71e5\") " pod="openshift-marketplace/certified-operators-mmf4j" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.371531 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s92qd\" (UniqueName: \"kubernetes.io/projected/d2aae40a-ded6-40de-a541-f22ef90f71e5-kube-api-access-s92qd\") pod \"certified-operators-mmf4j\" (UID: \"d2aae40a-ded6-40de-a541-f22ef90f71e5\") " pod="openshift-marketplace/certified-operators-mmf4j" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.459256 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mmf4j" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.472044 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.472906 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.482175 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.482454 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.491541 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.562532 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.562584 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.583501 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vd6qz"] Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.664456 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.665160 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.665286 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.712036 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.781530 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fccwg"] Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.825261 4975 patch_prober.go:28] interesting pod/router-default-5444994796-6hw84 
container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 00:09:29 crc kubenswrapper[4975]: [-]has-synced failed: reason withheld Jan 26 00:09:29 crc kubenswrapper[4975]: [+]process-running ok Jan 26 00:09:29 crc kubenswrapper[4975]: healthz check failed Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.825296 4975 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6hw84" podUID="ab6d30fc-43fe-46af-8d7c-e67f3b3811d2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.861835 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 26 00:09:29 crc kubenswrapper[4975]: I0126 00:09:29.996280 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mmf4j"] Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.072668 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4b4kw"] Jan 26 00:09:30 crc kubenswrapper[4975]: W0126 00:09:30.086565 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf99b29a_8baa_405f_a1f1_84116ba167a8.slice/crio-bcbb87aa9e61296d3377987908fff7719189f190b7c73d06bcf0c5b1bb3d5c35 WatchSource:0}: Error finding container bcbb87aa9e61296d3377987908fff7719189f190b7c73d06bcf0c5b1bb3d5c35: Status 404 returned error can't find the container with id bcbb87aa9e61296d3377987908fff7719189f190b7c73d06bcf0c5b1bb3d5c35 Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.162682 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.170072 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fccwg" event={"ID":"27ed45eb-a90d-4bd5-8a17-8988f53407aa","Type":"ContainerStarted","Data":"50cbeb4d625c525ecf5b6ce724e964ab10e1260dd51ff5f92359b3334df4c35f"} Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.171113 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" event={"ID":"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794","Type":"ContainerStarted","Data":"b7c141cf31dee562a2a2e52a24959310abc5214af8ac37ad9b113366e3efc9dd"} Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.173429 4975 generic.go:334] "Generic (PLEG): container finished" podID="75c99291-b46b-4f76-9922-cd530cca51c9" containerID="23615b64c47ff1d74cf4860b0afb9efb0da12db5d302ae18ad5e0d58666a3ed9" exitCode=0 Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.173496 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lx8th" event={"ID":"75c99291-b46b-4f76-9922-cd530cca51c9","Type":"ContainerDied","Data":"23615b64c47ff1d74cf4860b0afb9efb0da12db5d302ae18ad5e0d58666a3ed9"} Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.173521 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lx8th" 
event={"ID":"75c99291-b46b-4f76-9922-cd530cca51c9","Type":"ContainerStarted","Data":"6d49219568d933cec9f451c107dda2ccd2dc2bc13f18ee02e85720676baedfbb"} Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.175303 4975 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.177519 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4b4kw" event={"ID":"af99b29a-8baa-405f-a1f1-84116ba167a8","Type":"ContainerStarted","Data":"bcbb87aa9e61296d3377987908fff7719189f190b7c73d06bcf0c5b1bb3d5c35"} Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.181604 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mmf4j" event={"ID":"d2aae40a-ded6-40de-a541-f22ef90f71e5","Type":"ContainerStarted","Data":"e4206fb6373bb68ed40eb78deae9502889790049830617fb67a98412001375b8"} Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.547981 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hmnjx"] Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.549531 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hmnjx" Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.561905 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.579783 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hmnjx"] Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.711929 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z84m2\" (UniqueName: \"kubernetes.io/projected/927dc4e6-f4c4-497b-92d6-3218ab0794ac-kube-api-access-z84m2\") pod \"redhat-marketplace-hmnjx\" (UID: \"927dc4e6-f4c4-497b-92d6-3218ab0794ac\") " pod="openshift-marketplace/redhat-marketplace-hmnjx" Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.712401 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/927dc4e6-f4c4-497b-92d6-3218ab0794ac-catalog-content\") pod \"redhat-marketplace-hmnjx\" (UID: \"927dc4e6-f4c4-497b-92d6-3218ab0794ac\") " pod="openshift-marketplace/redhat-marketplace-hmnjx" Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.712438 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/927dc4e6-f4c4-497b-92d6-3218ab0794ac-utilities\") pod \"redhat-marketplace-hmnjx\" (UID: \"927dc4e6-f4c4-497b-92d6-3218ab0794ac\") " pod="openshift-marketplace/redhat-marketplace-hmnjx" Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.781216 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.801720 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-6hw84" Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.808950 4975 patch_prober.go:28] interesting pod/router-default-5444994796-6hw84 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 
500" start-of-body=[-]backend-http failed: reason withheld Jan 26 00:09:30 crc kubenswrapper[4975]: [-]has-synced failed: reason withheld Jan 26 00:09:30 crc kubenswrapper[4975]: [+]process-running ok Jan 26 00:09:30 crc kubenswrapper[4975]: healthz check failed Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.809003 4975 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6hw84" podUID="ab6d30fc-43fe-46af-8d7c-e67f3b3811d2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.825930 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z84m2\" (UniqueName: \"kubernetes.io/projected/927dc4e6-f4c4-497b-92d6-3218ab0794ac-kube-api-access-z84m2\") pod \"redhat-marketplace-hmnjx\" (UID: \"927dc4e6-f4c4-497b-92d6-3218ab0794ac\") " pod="openshift-marketplace/redhat-marketplace-hmnjx" Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.826033 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/927dc4e6-f4c4-497b-92d6-3218ab0794ac-catalog-content\") pod \"redhat-marketplace-hmnjx\" (UID: \"927dc4e6-f4c4-497b-92d6-3218ab0794ac\") " pod="openshift-marketplace/redhat-marketplace-hmnjx" Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.826118 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/927dc4e6-f4c4-497b-92d6-3218ab0794ac-utilities\") pod \"redhat-marketplace-hmnjx\" (UID: \"927dc4e6-f4c4-497b-92d6-3218ab0794ac\") " pod="openshift-marketplace/redhat-marketplace-hmnjx" Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.827086 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/927dc4e6-f4c4-497b-92d6-3218ab0794ac-utilities\") pod \"redhat-marketplace-hmnjx\" (UID: \"927dc4e6-f4c4-497b-92d6-3218ab0794ac\") " pod="openshift-marketplace/redhat-marketplace-hmnjx" Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.828410 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/927dc4e6-f4c4-497b-92d6-3218ab0794ac-catalog-content\") pod \"redhat-marketplace-hmnjx\" (UID: \"927dc4e6-f4c4-497b-92d6-3218ab0794ac\") " pod="openshift-marketplace/redhat-marketplace-hmnjx" Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.855672 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z84m2\" (UniqueName: \"kubernetes.io/projected/927dc4e6-f4c4-497b-92d6-3218ab0794ac-kube-api-access-z84m2\") pod \"redhat-marketplace-hmnjx\" (UID: \"927dc4e6-f4c4-497b-92d6-3218ab0794ac\") " pod="openshift-marketplace/redhat-marketplace-hmnjx" Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.904694 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7g6z6"] Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.905197 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29489760-6nbzp" Jan 26 00:09:30 crc kubenswrapper[4975]: E0126 00:09:30.909678 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f5a24d3-8dae-4970-8842-21bddef2373f" containerName="collect-profiles" Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.909725 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f5a24d3-8dae-4970-8842-21bddef2373f" containerName="collect-profiles" Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.909899 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f5a24d3-8dae-4970-8842-21bddef2373f" containerName="collect-profiles" Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.911050 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7g6z6" Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.924672 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7g6z6"] Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.926921 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bqbpq\" (UniqueName: \"kubernetes.io/projected/6f5a24d3-8dae-4970-8842-21bddef2373f-kube-api-access-bqbpq\") pod \"6f5a24d3-8dae-4970-8842-21bddef2373f\" (UID: \"6f5a24d3-8dae-4970-8842-21bddef2373f\") " Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.926974 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6f5a24d3-8dae-4970-8842-21bddef2373f-config-volume\") pod \"6f5a24d3-8dae-4970-8842-21bddef2373f\" (UID: \"6f5a24d3-8dae-4970-8842-21bddef2373f\") " Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.927037 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6f5a24d3-8dae-4970-8842-21bddef2373f-secret-volume\") pod \"6f5a24d3-8dae-4970-8842-21bddef2373f\" (UID: \"6f5a24d3-8dae-4970-8842-21bddef2373f\") " Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.927160 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/518ddce4-70cd-4aca-a096-37237d16dd76-catalog-content\") pod \"redhat-marketplace-7g6z6\" (UID: \"518ddce4-70cd-4aca-a096-37237d16dd76\") " pod="openshift-marketplace/redhat-marketplace-7g6z6" Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.927203 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jv9q8\" (UniqueName: \"kubernetes.io/projected/518ddce4-70cd-4aca-a096-37237d16dd76-kube-api-access-jv9q8\") pod \"redhat-marketplace-7g6z6\" (UID: \"518ddce4-70cd-4aca-a096-37237d16dd76\") " pod="openshift-marketplace/redhat-marketplace-7g6z6" Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.927222 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/518ddce4-70cd-4aca-a096-37237d16dd76-utilities\") pod \"redhat-marketplace-7g6z6\" (UID: \"518ddce4-70cd-4aca-a096-37237d16dd76\") " pod="openshift-marketplace/redhat-marketplace-7g6z6" Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.927905 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/6f5a24d3-8dae-4970-8842-21bddef2373f-config-volume" (OuterVolumeSpecName: "config-volume") pod "6f5a24d3-8dae-4970-8842-21bddef2373f" (UID: "6f5a24d3-8dae-4970-8842-21bddef2373f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.933071 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f5a24d3-8dae-4970-8842-21bddef2373f-kube-api-access-bqbpq" (OuterVolumeSpecName: "kube-api-access-bqbpq") pod "6f5a24d3-8dae-4970-8842-21bddef2373f" (UID: "6f5a24d3-8dae-4970-8842-21bddef2373f"). InnerVolumeSpecName "kube-api-access-bqbpq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.938080 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6f5a24d3-8dae-4970-8842-21bddef2373f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "6f5a24d3-8dae-4970-8842-21bddef2373f" (UID: "6f5a24d3-8dae-4970-8842-21bddef2373f"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:09:30 crc kubenswrapper[4975]: I0126 00:09:30.957593 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hmnjx" Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.028339 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/518ddce4-70cd-4aca-a096-37237d16dd76-catalog-content\") pod \"redhat-marketplace-7g6z6\" (UID: \"518ddce4-70cd-4aca-a096-37237d16dd76\") " pod="openshift-marketplace/redhat-marketplace-7g6z6" Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.028795 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jv9q8\" (UniqueName: \"kubernetes.io/projected/518ddce4-70cd-4aca-a096-37237d16dd76-kube-api-access-jv9q8\") pod \"redhat-marketplace-7g6z6\" (UID: \"518ddce4-70cd-4aca-a096-37237d16dd76\") " pod="openshift-marketplace/redhat-marketplace-7g6z6" Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.028826 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/518ddce4-70cd-4aca-a096-37237d16dd76-utilities\") pod \"redhat-marketplace-7g6z6\" (UID: \"518ddce4-70cd-4aca-a096-37237d16dd76\") " pod="openshift-marketplace/redhat-marketplace-7g6z6" Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.028872 4975 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/6f5a24d3-8dae-4970-8842-21bddef2373f-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.028886 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bqbpq\" (UniqueName: \"kubernetes.io/projected/6f5a24d3-8dae-4970-8842-21bddef2373f-kube-api-access-bqbpq\") on node \"crc\" DevicePath \"\"" Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.028896 4975 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6f5a24d3-8dae-4970-8842-21bddef2373f-config-volume\") on node \"crc\" DevicePath \"\"" Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.029036 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/518ddce4-70cd-4aca-a096-37237d16dd76-catalog-content\") pod \"redhat-marketplace-7g6z6\" (UID: \"518ddce4-70cd-4aca-a096-37237d16dd76\") " pod="openshift-marketplace/redhat-marketplace-7g6z6" Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.029287 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/518ddce4-70cd-4aca-a096-37237d16dd76-utilities\") pod \"redhat-marketplace-7g6z6\" (UID: \"518ddce4-70cd-4aca-a096-37237d16dd76\") " pod="openshift-marketplace/redhat-marketplace-7g6z6" Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.054982 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jv9q8\" (UniqueName: \"kubernetes.io/projected/518ddce4-70cd-4aca-a096-37237d16dd76-kube-api-access-jv9q8\") pod \"redhat-marketplace-7g6z6\" (UID: \"518ddce4-70cd-4aca-a096-37237d16dd76\") " pod="openshift-marketplace/redhat-marketplace-7g6z6" Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.185552 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hmnjx"] Jan 26 00:09:31 crc kubenswrapper[4975]: W0126 00:09:31.197538 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod927dc4e6_f4c4_497b_92d6_3218ab0794ac.slice/crio-0646effc2cb331d655d0c8a8444f0f248b74a419c32606dadfe257fe95033c9b WatchSource:0}: Error finding container 0646effc2cb331d655d0c8a8444f0f248b74a419c32606dadfe257fe95033c9b: Status 404 returned error can't find the container with id 0646effc2cb331d655d0c8a8444f0f248b74a419c32606dadfe257fe95033c9b Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.200985 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b","Type":"ContainerStarted","Data":"ee84f8b378d1eb2118bb1d42ca8ebfb36852a2d60076939c18672033f55dff1b"} Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.204812 4975 generic.go:334] "Generic (PLEG): container finished" podID="d2aae40a-ded6-40de-a541-f22ef90f71e5" containerID="1dfa539b39512da0673c2c1ef3e9d9f5e7c70eb9a1c87f8af0da41ca3ebf2c57" exitCode=0 Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.205582 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mmf4j" event={"ID":"d2aae40a-ded6-40de-a541-f22ef90f71e5","Type":"ContainerDied","Data":"1dfa539b39512da0673c2c1ef3e9d9f5e7c70eb9a1c87f8af0da41ca3ebf2c57"} Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.208028 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29489760-6nbzp" Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.208577 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29489760-6nbzp" event={"ID":"6f5a24d3-8dae-4970-8842-21bddef2373f","Type":"ContainerDied","Data":"fc3aa662113ce55881831eb7077dbec910f4e66ad1cf89f36ade2ebecf6d82d8"} Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.208622 4975 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fc3aa662113ce55881831eb7077dbec910f4e66ad1cf89f36ade2ebecf6d82d8" Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.209821 4975 generic.go:334] "Generic (PLEG): container finished" podID="27ed45eb-a90d-4bd5-8a17-8988f53407aa" containerID="517d5130a369efec7ffbbbc8cfac6d33522ca9fddc5bed3d3c70859ee4c6744c" exitCode=0 Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.209865 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fccwg" event={"ID":"27ed45eb-a90d-4bd5-8a17-8988f53407aa","Type":"ContainerDied","Data":"517d5130a369efec7ffbbbc8cfac6d33522ca9fddc5bed3d3c70859ee4c6744c"} Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.211897 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" event={"ID":"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794","Type":"ContainerStarted","Data":"8212396d902b9d79df60d0a101cb395096b2ec6d613f684276cea4582078481f"} Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.212093 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.214071 4975 generic.go:334] "Generic (PLEG): container finished" podID="af99b29a-8baa-405f-a1f1-84116ba167a8" containerID="1f8497fee0a16151aa8b82259c9f2bbe80222175a533b596194e7dba5e0f47d4" exitCode=0 Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.214121 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4b4kw" event={"ID":"af99b29a-8baa-405f-a1f1-84116ba167a8","Type":"ContainerDied","Data":"1f8497fee0a16151aa8b82259c9f2bbe80222175a533b596194e7dba5e0f47d4"} Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.247590 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" podStartSLOduration=136.24757173 podStartE2EDuration="2m16.24757173s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:31.244285694 +0000 UTC m=+155.365491188" watchObservedRunningTime="2026-01-26 00:09:31.24757173 +0000 UTC m=+155.368777224" Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.253546 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7g6z6" Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.606954 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7g6z6"] Jan 26 00:09:31 crc kubenswrapper[4975]: W0126 00:09:31.614251 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod518ddce4_70cd_4aca_a096_37237d16dd76.slice/crio-df062c9b8e60c24edc5e7acacfff5cc175c6f1a9de7bce52ec7f2cc452ac7c66 WatchSource:0}: Error finding container df062c9b8e60c24edc5e7acacfff5cc175c6f1a9de7bce52ec7f2cc452ac7c66: Status 404 returned error can't find the container with id df062c9b8e60c24edc5e7acacfff5cc175c6f1a9de7bce52ec7f2cc452ac7c66 Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.699633 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-n67bw"] Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.700867 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n67bw" Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.703707 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.717909 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n67bw"] Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.805698 4975 patch_prober.go:28] interesting pod/router-default-5444994796-6hw84 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 26 00:09:31 crc kubenswrapper[4975]: [+]has-synced ok Jan 26 00:09:31 crc kubenswrapper[4975]: [+]process-running ok Jan 26 00:09:31 crc kubenswrapper[4975]: healthz check failed Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.806050 4975 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-6hw84" podUID="ab6d30fc-43fe-46af-8d7c-e67f3b3811d2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.846445 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/034b4c5e-88c6-4dd7-a67d-bd9c23a75192-utilities\") pod \"redhat-operators-n67bw\" (UID: \"034b4c5e-88c6-4dd7-a67d-bd9c23a75192\") " pod="openshift-marketplace/redhat-operators-n67bw" Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.846558 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbtcw\" (UniqueName: \"kubernetes.io/projected/034b4c5e-88c6-4dd7-a67d-bd9c23a75192-kube-api-access-jbtcw\") pod \"redhat-operators-n67bw\" (UID: \"034b4c5e-88c6-4dd7-a67d-bd9c23a75192\") " pod="openshift-marketplace/redhat-operators-n67bw" Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.846607 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/034b4c5e-88c6-4dd7-a67d-bd9c23a75192-catalog-content\") pod \"redhat-operators-n67bw\" (UID: \"034b4c5e-88c6-4dd7-a67d-bd9c23a75192\") " pod="openshift-marketplace/redhat-operators-n67bw" Jan 26 
00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.950979 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/034b4c5e-88c6-4dd7-a67d-bd9c23a75192-utilities\") pod \"redhat-operators-n67bw\" (UID: \"034b4c5e-88c6-4dd7-a67d-bd9c23a75192\") " pod="openshift-marketplace/redhat-operators-n67bw" Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.951088 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbtcw\" (UniqueName: \"kubernetes.io/projected/034b4c5e-88c6-4dd7-a67d-bd9c23a75192-kube-api-access-jbtcw\") pod \"redhat-operators-n67bw\" (UID: \"034b4c5e-88c6-4dd7-a67d-bd9c23a75192\") " pod="openshift-marketplace/redhat-operators-n67bw" Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.951123 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/034b4c5e-88c6-4dd7-a67d-bd9c23a75192-catalog-content\") pod \"redhat-operators-n67bw\" (UID: \"034b4c5e-88c6-4dd7-a67d-bd9c23a75192\") " pod="openshift-marketplace/redhat-operators-n67bw" Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.951672 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/034b4c5e-88c6-4dd7-a67d-bd9c23a75192-catalog-content\") pod \"redhat-operators-n67bw\" (UID: \"034b4c5e-88c6-4dd7-a67d-bd9c23a75192\") " pod="openshift-marketplace/redhat-operators-n67bw" Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.951857 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/034b4c5e-88c6-4dd7-a67d-bd9c23a75192-utilities\") pod \"redhat-operators-n67bw\" (UID: \"034b4c5e-88c6-4dd7-a67d-bd9c23a75192\") " pod="openshift-marketplace/redhat-operators-n67bw" Jan 26 00:09:31 crc kubenswrapper[4975]: I0126 00:09:31.976625 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbtcw\" (UniqueName: \"kubernetes.io/projected/034b4c5e-88c6-4dd7-a67d-bd9c23a75192-kube-api-access-jbtcw\") pod \"redhat-operators-n67bw\" (UID: \"034b4c5e-88c6-4dd7-a67d-bd9c23a75192\") " pod="openshift-marketplace/redhat-operators-n67bw" Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.051493 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n67bw" Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.106277 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nh5hc"] Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.108012 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nh5hc" Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.113353 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nh5hc"] Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.153365 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rg988\" (UniqueName: \"kubernetes.io/projected/77eccefb-3716-4e9d-9807-059400c1c934-kube-api-access-rg988\") pod \"redhat-operators-nh5hc\" (UID: \"77eccefb-3716-4e9d-9807-059400c1c934\") " pod="openshift-marketplace/redhat-operators-nh5hc" Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.153413 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77eccefb-3716-4e9d-9807-059400c1c934-utilities\") pod \"redhat-operators-nh5hc\" (UID: \"77eccefb-3716-4e9d-9807-059400c1c934\") " pod="openshift-marketplace/redhat-operators-nh5hc" Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.153437 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77eccefb-3716-4e9d-9807-059400c1c934-catalog-content\") pod \"redhat-operators-nh5hc\" (UID: \"77eccefb-3716-4e9d-9807-059400c1c934\") " pod="openshift-marketplace/redhat-operators-nh5hc" Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.230464 4975 generic.go:334] "Generic (PLEG): container finished" podID="518ddce4-70cd-4aca-a096-37237d16dd76" containerID="7c2d6b68475e273f7ecacb80c49a8724d4312653dfd516ccc43c7108c905f870" exitCode=0 Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.230571 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7g6z6" event={"ID":"518ddce4-70cd-4aca-a096-37237d16dd76","Type":"ContainerDied","Data":"7c2d6b68475e273f7ecacb80c49a8724d4312653dfd516ccc43c7108c905f870"} Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.230696 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7g6z6" event={"ID":"518ddce4-70cd-4aca-a096-37237d16dd76","Type":"ContainerStarted","Data":"df062c9b8e60c24edc5e7acacfff5cc175c6f1a9de7bce52ec7f2cc452ac7c66"} Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.233848 4975 generic.go:334] "Generic (PLEG): container finished" podID="927dc4e6-f4c4-497b-92d6-3218ab0794ac" containerID="90d962c1959b09ad9f415a8eb73519e70389f190d3db42055e3a1b54b267a9b5" exitCode=0 Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.233907 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hmnjx" event={"ID":"927dc4e6-f4c4-497b-92d6-3218ab0794ac","Type":"ContainerDied","Data":"90d962c1959b09ad9f415a8eb73519e70389f190d3db42055e3a1b54b267a9b5"} Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.233940 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hmnjx" event={"ID":"927dc4e6-f4c4-497b-92d6-3218ab0794ac","Type":"ContainerStarted","Data":"0646effc2cb331d655d0c8a8444f0f248b74a419c32606dadfe257fe95033c9b"} Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.264706 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77eccefb-3716-4e9d-9807-059400c1c934-utilities\") pod \"redhat-operators-nh5hc\" 
(UID: \"77eccefb-3716-4e9d-9807-059400c1c934\") " pod="openshift-marketplace/redhat-operators-nh5hc" Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.264764 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77eccefb-3716-4e9d-9807-059400c1c934-catalog-content\") pod \"redhat-operators-nh5hc\" (UID: \"77eccefb-3716-4e9d-9807-059400c1c934\") " pod="openshift-marketplace/redhat-operators-nh5hc" Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.264916 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rg988\" (UniqueName: \"kubernetes.io/projected/77eccefb-3716-4e9d-9807-059400c1c934-kube-api-access-rg988\") pod \"redhat-operators-nh5hc\" (UID: \"77eccefb-3716-4e9d-9807-059400c1c934\") " pod="openshift-marketplace/redhat-operators-nh5hc" Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.265638 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77eccefb-3716-4e9d-9807-059400c1c934-utilities\") pod \"redhat-operators-nh5hc\" (UID: \"77eccefb-3716-4e9d-9807-059400c1c934\") " pod="openshift-marketplace/redhat-operators-nh5hc" Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.265919 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77eccefb-3716-4e9d-9807-059400c1c934-catalog-content\") pod \"redhat-operators-nh5hc\" (UID: \"77eccefb-3716-4e9d-9807-059400c1c934\") " pod="openshift-marketplace/redhat-operators-nh5hc" Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.274644 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b","Type":"ContainerStarted","Data":"3030f564758aea2b9e74ce71c334c868bb885b2d2bb40f9421928ce6fd017038"} Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.299242 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rg988\" (UniqueName: \"kubernetes.io/projected/77eccefb-3716-4e9d-9807-059400c1c934-kube-api-access-rg988\") pod \"redhat-operators-nh5hc\" (UID: \"77eccefb-3716-4e9d-9807-059400c1c934\") " pod="openshift-marketplace/redhat-operators-nh5hc" Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.382321 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=3.382298294 podStartE2EDuration="3.382298294s" podCreationTimestamp="2026-01-26 00:09:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:32.324796709 +0000 UTC m=+156.446002203" watchObservedRunningTime="2026-01-26 00:09:32.382298294 +0000 UTC m=+156.503503788" Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.383954 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-n67bw"] Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.450757 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nh5hc" Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.498707 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-9hgnx" Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.821060 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-6hw84" Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.831160 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-6hw84" Jan 26 00:09:32 crc kubenswrapper[4975]: I0126 00:09:32.852323 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nh5hc"] Jan 26 00:09:32 crc kubenswrapper[4975]: W0126 00:09:32.919510 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod77eccefb_3716_4e9d_9807_059400c1c934.slice/crio-45b87184c693dfa18f337a0b27aa36465406aa3cfb88602681d77e6a0a902465 WatchSource:0}: Error finding container 45b87184c693dfa18f337a0b27aa36465406aa3cfb88602681d77e6a0a902465: Status 404 returned error can't find the container with id 45b87184c693dfa18f337a0b27aa36465406aa3cfb88602681d77e6a0a902465 Jan 26 00:09:33 crc kubenswrapper[4975]: I0126 00:09:33.298870 4975 generic.go:334] "Generic (PLEG): container finished" podID="b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b" containerID="3030f564758aea2b9e74ce71c334c868bb885b2d2bb40f9421928ce6fd017038" exitCode=0 Jan 26 00:09:33 crc kubenswrapper[4975]: I0126 00:09:33.299485 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b","Type":"ContainerDied","Data":"3030f564758aea2b9e74ce71c334c868bb885b2d2bb40f9421928ce6fd017038"} Jan 26 00:09:33 crc kubenswrapper[4975]: I0126 00:09:33.312968 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nh5hc" event={"ID":"77eccefb-3716-4e9d-9807-059400c1c934","Type":"ContainerStarted","Data":"45b87184c693dfa18f337a0b27aa36465406aa3cfb88602681d77e6a0a902465"} Jan 26 00:09:33 crc kubenswrapper[4975]: I0126 00:09:33.317224 4975 generic.go:334] "Generic (PLEG): container finished" podID="034b4c5e-88c6-4dd7-a67d-bd9c23a75192" containerID="a7017a6f1d2aa42698072decc1aa3f9dfe2c7e17a5347ef266baa53302756de4" exitCode=0 Jan 26 00:09:33 crc kubenswrapper[4975]: I0126 00:09:33.317548 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n67bw" event={"ID":"034b4c5e-88c6-4dd7-a67d-bd9c23a75192","Type":"ContainerDied","Data":"a7017a6f1d2aa42698072decc1aa3f9dfe2c7e17a5347ef266baa53302756de4"} Jan 26 00:09:33 crc kubenswrapper[4975]: I0126 00:09:33.317618 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n67bw" event={"ID":"034b4c5e-88c6-4dd7-a67d-bd9c23a75192","Type":"ContainerStarted","Data":"be4ffe6cdcf63f21b5c62a03ef992110851f5c38ca8dac5072681a1b69361b70"} Jan 26 00:09:34 crc kubenswrapper[4975]: I0126 00:09:34.361167 4975 generic.go:334] "Generic (PLEG): container finished" podID="77eccefb-3716-4e9d-9807-059400c1c934" containerID="7b558a412cb8a98230bc192ccee476485fd31bec7979a674150cbd55c152d9ab" exitCode=0 Jan 26 00:09:34 crc kubenswrapper[4975]: I0126 00:09:34.361555 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-nh5hc" event={"ID":"77eccefb-3716-4e9d-9807-059400c1c934","Type":"ContainerDied","Data":"7b558a412cb8a98230bc192ccee476485fd31bec7979a674150cbd55c152d9ab"} Jan 26 00:09:34 crc kubenswrapper[4975]: I0126 00:09:34.622448 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 26 00:09:34 crc kubenswrapper[4975]: I0126 00:09:34.623452 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 26 00:09:34 crc kubenswrapper[4975]: I0126 00:09:34.629424 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 26 00:09:34 crc kubenswrapper[4975]: I0126 00:09:34.632493 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 26 00:09:34 crc kubenswrapper[4975]: I0126 00:09:34.634519 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 26 00:09:34 crc kubenswrapper[4975]: I0126 00:09:34.710626 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 26 00:09:34 crc kubenswrapper[4975]: I0126 00:09:34.710728 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 26 00:09:34 crc kubenswrapper[4975]: I0126 00:09:34.812174 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 26 00:09:34 crc kubenswrapper[4975]: I0126 00:09:34.812538 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 26 00:09:34 crc kubenswrapper[4975]: I0126 00:09:34.812638 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 26 00:09:34 crc kubenswrapper[4975]: I0126 00:09:34.841277 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 26 00:09:34 crc kubenswrapper[4975]: I0126 00:09:34.974041 4975 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 26 00:09:35 crc kubenswrapper[4975]: I0126 00:09:35.000438 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 26 00:09:35 crc kubenswrapper[4975]: I0126 00:09:35.124386 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b-kube-api-access\") pod \"b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b\" (UID: \"b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b\") " Jan 26 00:09:35 crc kubenswrapper[4975]: I0126 00:09:35.124545 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b-kubelet-dir\") pod \"b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b\" (UID: \"b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b\") " Jan 26 00:09:35 crc kubenswrapper[4975]: I0126 00:09:35.124693 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b" (UID: "b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:09:35 crc kubenswrapper[4975]: I0126 00:09:35.124950 4975 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 26 00:09:35 crc kubenswrapper[4975]: I0126 00:09:35.136293 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b" (UID: "b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:09:35 crc kubenswrapper[4975]: I0126 00:09:35.225770 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 26 00:09:35 crc kubenswrapper[4975]: I0126 00:09:35.406975 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 26 00:09:35 crc kubenswrapper[4975]: I0126 00:09:35.410981 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b","Type":"ContainerDied","Data":"ee84f8b378d1eb2118bb1d42ca8ebfb36852a2d60076939c18672033f55dff1b"} Jan 26 00:09:35 crc kubenswrapper[4975]: I0126 00:09:35.411020 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 26 00:09:35 crc kubenswrapper[4975]: I0126 00:09:35.411046 4975 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ee84f8b378d1eb2118bb1d42ca8ebfb36852a2d60076939c18672033f55dff1b" Jan 26 00:09:36 crc kubenswrapper[4975]: I0126 00:09:36.450952 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a","Type":"ContainerStarted","Data":"bd67cd0b5561673ef03aaddfedf2c07b942b11ee9f79c6edf1c323fc7163bccd"} Jan 26 00:09:37 crc kubenswrapper[4975]: E0126 00:09:37.451150 4975 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-pod99ad6ed0_b1e6_4a42_8c27_d00d5a6f478a.slice/crio-e77408608705604913481b8a392797d2a2417024a19ed4d20a30cabc6e9e2b03.scope\": RecentStats: unable to find data in memory cache]" Jan 26 00:09:37 crc kubenswrapper[4975]: I0126 00:09:37.470018 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a","Type":"ContainerStarted","Data":"e77408608705604913481b8a392797d2a2417024a19ed4d20a30cabc6e9e2b03"} Jan 26 00:09:37 crc kubenswrapper[4975]: I0126 00:09:37.494133 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.494114711 podStartE2EDuration="3.494114711s" podCreationTimestamp="2026-01-26 00:09:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:09:37.492833042 +0000 UTC m=+161.614038546" watchObservedRunningTime="2026-01-26 00:09:37.494114711 +0000 UTC m=+161.615320205" Jan 26 00:09:38 crc kubenswrapper[4975]: I0126 00:09:38.455151 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:38 crc kubenswrapper[4975]: I0126 00:09:38.459463 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-wrh4k" Jan 26 00:09:38 crc kubenswrapper[4975]: I0126 00:09:38.526443 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs\") pod \"network-metrics-daemon-s459q\" (UID: \"99d35071-9f6d-45df-841f-fd49ea0550c1\") " pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:09:38 crc kubenswrapper[4975]: I0126 00:09:38.547719 4975 generic.go:334] "Generic (PLEG): container finished" podID="99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a" containerID="e77408608705604913481b8a392797d2a2417024a19ed4d20a30cabc6e9e2b03" exitCode=0 Jan 26 00:09:38 crc kubenswrapper[4975]: I0126 00:09:38.547867 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a","Type":"ContainerDied","Data":"e77408608705604913481b8a392797d2a2417024a19ed4d20a30cabc6e9e2b03"} Jan 26 00:09:38 crc kubenswrapper[4975]: I0126 00:09:38.548108 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/99d35071-9f6d-45df-841f-fd49ea0550c1-metrics-certs\") pod \"network-metrics-daemon-s459q\" (UID: 
\"99d35071-9f6d-45df-841f-fd49ea0550c1\") " pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:09:38 crc kubenswrapper[4975]: I0126 00:09:38.564230 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-s459q" Jan 26 00:09:39 crc kubenswrapper[4975]: I0126 00:09:39.088360 4975 patch_prober.go:28] interesting pod/downloads-7954f5f757-cbkrr container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Jan 26 00:09:39 crc kubenswrapper[4975]: I0126 00:09:39.088680 4975 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-cbkrr" podUID="c1bf1b13-966b-4a74-935c-47af817d7777" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Jan 26 00:09:39 crc kubenswrapper[4975]: I0126 00:09:39.088360 4975 patch_prober.go:28] interesting pod/downloads-7954f5f757-cbkrr container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" start-of-body= Jan 26 00:09:39 crc kubenswrapper[4975]: I0126 00:09:39.089100 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-cbkrr" podUID="c1bf1b13-966b-4a74-935c-47af817d7777" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.13:8080/\": dial tcp 10.217.0.13:8080: connect: connection refused" Jan 26 00:09:39 crc kubenswrapper[4975]: I0126 00:09:39.430185 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-s459q"] Jan 26 00:09:39 crc kubenswrapper[4975]: W0126 00:09:39.470343 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod99d35071_9f6d_45df_841f_fd49ea0550c1.slice/crio-35c1df1fbb76840891beb13205b11a864656e3687549d9b1d72ff03bd6b696a6 WatchSource:0}: Error finding container 35c1df1fbb76840891beb13205b11a864656e3687549d9b1d72ff03bd6b696a6: Status 404 returned error can't find the container with id 35c1df1fbb76840891beb13205b11a864656e3687549d9b1d72ff03bd6b696a6 Jan 26 00:09:39 crc kubenswrapper[4975]: I0126 00:09:39.580225 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-s459q" event={"ID":"99d35071-9f6d-45df-841f-fd49ea0550c1","Type":"ContainerStarted","Data":"35c1df1fbb76840891beb13205b11a864656e3687549d9b1d72ff03bd6b696a6"} Jan 26 00:09:40 crc kubenswrapper[4975]: I0126 00:09:40.481265 4975 patch_prober.go:28] interesting pod/machine-config-daemon-f42fk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 00:09:40 crc kubenswrapper[4975]: I0126 00:09:40.481325 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 00:09:41 crc kubenswrapper[4975]: I0126 00:09:41.705437 4975 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-s459q" event={"ID":"99d35071-9f6d-45df-841f-fd49ea0550c1","Type":"ContainerStarted","Data":"096c937d1b93df342029caa3120aa962fedc4c9936c8b9e8d7ddd87420f3c762"} Jan 26 00:09:48 crc kubenswrapper[4975]: I0126 00:09:48.914839 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:09:49 crc kubenswrapper[4975]: I0126 00:09:49.095631 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-cbkrr" Jan 26 00:09:55 crc kubenswrapper[4975]: I0126 00:09:55.837187 4975 generic.go:334] "Generic (PLEG): container finished" podID="e127f059-17d0-4da8-899b-94674c0ff01f" containerID="a80ad68f393a6b165d9de3d05159ee29e83862fa018f8e2c1588497dca848d51" exitCode=0 Jan 26 00:09:55 crc kubenswrapper[4975]: I0126 00:09:55.837242 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29489760-c9d48" event={"ID":"e127f059-17d0-4da8-899b-94674c0ff01f","Type":"ContainerDied","Data":"a80ad68f393a6b165d9de3d05159ee29e83862fa018f8e2c1588497dca848d51"} Jan 26 00:10:01 crc kubenswrapper[4975]: I0126 00:10:01.054451 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-l9vhw" Jan 26 00:10:02 crc kubenswrapper[4975]: I0126 00:10:02.482188 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 26 00:10:06 crc kubenswrapper[4975]: I0126 00:10:06.692289 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 26 00:10:06 crc kubenswrapper[4975]: I0126 00:10:06.697288 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-pruner-29489760-c9d48" Jan 26 00:10:06 crc kubenswrapper[4975]: I0126 00:10:06.766658 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a-kube-api-access\") pod \"99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a\" (UID: \"99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a\") " Jan 26 00:10:06 crc kubenswrapper[4975]: I0126 00:10:06.766742 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a-kubelet-dir\") pod \"99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a\" (UID: \"99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a\") " Jan 26 00:10:06 crc kubenswrapper[4975]: I0126 00:10:06.766868 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sf9vp\" (UniqueName: \"kubernetes.io/projected/e127f059-17d0-4da8-899b-94674c0ff01f-kube-api-access-sf9vp\") pod \"e127f059-17d0-4da8-899b-94674c0ff01f\" (UID: \"e127f059-17d0-4da8-899b-94674c0ff01f\") " Jan 26 00:10:06 crc kubenswrapper[4975]: I0126 00:10:06.766905 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e127f059-17d0-4da8-899b-94674c0ff01f-serviceca\") pod \"e127f059-17d0-4da8-899b-94674c0ff01f\" (UID: \"e127f059-17d0-4da8-899b-94674c0ff01f\") " Jan 26 00:10:06 crc kubenswrapper[4975]: I0126 00:10:06.766931 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a" (UID: "99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:10:06 crc kubenswrapper[4975]: I0126 00:10:06.767116 4975 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 26 00:10:06 crc kubenswrapper[4975]: I0126 00:10:06.767761 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e127f059-17d0-4da8-899b-94674c0ff01f-serviceca" (OuterVolumeSpecName: "serviceca") pod "e127f059-17d0-4da8-899b-94674c0ff01f" (UID: "e127f059-17d0-4da8-899b-94674c0ff01f"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:10:06 crc kubenswrapper[4975]: I0126 00:10:06.772097 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e127f059-17d0-4da8-899b-94674c0ff01f-kube-api-access-sf9vp" (OuterVolumeSpecName: "kube-api-access-sf9vp") pod "e127f059-17d0-4da8-899b-94674c0ff01f" (UID: "e127f059-17d0-4da8-899b-94674c0ff01f"). InnerVolumeSpecName "kube-api-access-sf9vp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:10:06 crc kubenswrapper[4975]: I0126 00:10:06.772167 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a" (UID: "99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:10:06 crc kubenswrapper[4975]: I0126 00:10:06.869140 4975 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e127f059-17d0-4da8-899b-94674c0ff01f-serviceca\") on node \"crc\" DevicePath \"\"" Jan 26 00:10:06 crc kubenswrapper[4975]: I0126 00:10:06.869185 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 26 00:10:06 crc kubenswrapper[4975]: I0126 00:10:06.869203 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sf9vp\" (UniqueName: \"kubernetes.io/projected/e127f059-17d0-4da8-899b-94674c0ff01f-kube-api-access-sf9vp\") on node \"crc\" DevicePath \"\"" Jan 26 00:10:06 crc kubenswrapper[4975]: I0126 00:10:06.915440 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29489760-c9d48" event={"ID":"e127f059-17d0-4da8-899b-94674c0ff01f","Type":"ContainerDied","Data":"35412c04d0f6db08792428eca5e4d88dbe2049be446130a62bade1a54f238c87"} Jan 26 00:10:06 crc kubenswrapper[4975]: I0126 00:10:06.915492 4975 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="35412c04d0f6db08792428eca5e4d88dbe2049be446130a62bade1a54f238c87" Jan 26 00:10:06 crc kubenswrapper[4975]: I0126 00:10:06.915564 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-pruner-29489760-c9d48" Jan 26 00:10:06 crc kubenswrapper[4975]: I0126 00:10:06.917833 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a","Type":"ContainerDied","Data":"bd67cd0b5561673ef03aaddfedf2c07b942b11ee9f79c6edf1c323fc7163bccd"} Jan 26 00:10:06 crc kubenswrapper[4975]: I0126 00:10:06.917874 4975 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bd67cd0b5561673ef03aaddfedf2c07b942b11ee9f79c6edf1c323fc7163bccd" Jan 26 00:10:06 crc kubenswrapper[4975]: I0126 00:10:06.917894 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 26 00:10:08 crc kubenswrapper[4975]: E0126 00:10:08.820386 4975 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 26 00:10:08 crc kubenswrapper[4975]: E0126 00:10:08.820829 4975 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cbtn4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-4b4kw_openshift-marketplace(af99b29a-8baa-405f-a1f1-84116ba167a8): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 00:10:08 crc kubenswrapper[4975]: E0126 00:10:08.821988 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-4b4kw" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" Jan 26 00:10:10 crc kubenswrapper[4975]: I0126 00:10:10.414888 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 26 00:10:10 crc kubenswrapper[4975]: E0126 00:10:10.415724 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e127f059-17d0-4da8-899b-94674c0ff01f" containerName="image-pruner" Jan 26 00:10:10 crc kubenswrapper[4975]: I0126 00:10:10.415754 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="e127f059-17d0-4da8-899b-94674c0ff01f" containerName="image-pruner" Jan 26 00:10:10 crc kubenswrapper[4975]: E0126 00:10:10.415771 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b" containerName="pruner" Jan 26 00:10:10 crc kubenswrapper[4975]: I0126 00:10:10.415777 4975 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b" containerName="pruner" Jan 26 00:10:10 crc kubenswrapper[4975]: E0126 00:10:10.415783 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a" containerName="pruner" Jan 26 00:10:10 crc kubenswrapper[4975]: I0126 00:10:10.415790 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a" containerName="pruner" Jan 26 00:10:10 crc kubenswrapper[4975]: I0126 00:10:10.415893 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="e127f059-17d0-4da8-899b-94674c0ff01f" containerName="image-pruner" Jan 26 00:10:10 crc kubenswrapper[4975]: I0126 00:10:10.415916 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1da9dd1-9eab-4de3-a4aa-4719a78e7e0b" containerName="pruner" Jan 26 00:10:10 crc kubenswrapper[4975]: I0126 00:10:10.415929 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="99ad6ed0-b1e6-4a42-8c27-d00d5a6f478a" containerName="pruner" Jan 26 00:10:10 crc kubenswrapper[4975]: I0126 00:10:10.416389 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 00:10:10 crc kubenswrapper[4975]: I0126 00:10:10.418885 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 26 00:10:10 crc kubenswrapper[4975]: I0126 00:10:10.419115 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 26 00:10:10 crc kubenswrapper[4975]: I0126 00:10:10.421265 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 26 00:10:10 crc kubenswrapper[4975]: I0126 00:10:10.481969 4975 patch_prober.go:28] interesting pod/machine-config-daemon-f42fk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 00:10:10 crc kubenswrapper[4975]: I0126 00:10:10.482106 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 00:10:10 crc kubenswrapper[4975]: I0126 00:10:10.528878 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1afa53e5-50fd-4fa7-95cf-3ed3f42d9910-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1afa53e5-50fd-4fa7-95cf-3ed3f42d9910\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 00:10:10 crc kubenswrapper[4975]: I0126 00:10:10.529228 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1afa53e5-50fd-4fa7-95cf-3ed3f42d9910-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1afa53e5-50fd-4fa7-95cf-3ed3f42d9910\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 00:10:10 crc kubenswrapper[4975]: I0126 00:10:10.634932 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: 
\"kubernetes.io/host-path/1afa53e5-50fd-4fa7-95cf-3ed3f42d9910-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1afa53e5-50fd-4fa7-95cf-3ed3f42d9910\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 00:10:10 crc kubenswrapper[4975]: I0126 00:10:10.634347 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1afa53e5-50fd-4fa7-95cf-3ed3f42d9910-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1afa53e5-50fd-4fa7-95cf-3ed3f42d9910\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 00:10:10 crc kubenswrapper[4975]: I0126 00:10:10.636003 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1afa53e5-50fd-4fa7-95cf-3ed3f42d9910-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1afa53e5-50fd-4fa7-95cf-3ed3f42d9910\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 00:10:10 crc kubenswrapper[4975]: I0126 00:10:10.668958 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1afa53e5-50fd-4fa7-95cf-3ed3f42d9910-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1afa53e5-50fd-4fa7-95cf-3ed3f42d9910\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 00:10:10 crc kubenswrapper[4975]: I0126 00:10:10.782350 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 00:10:14 crc kubenswrapper[4975]: E0126 00:10:14.013820 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-4b4kw" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" Jan 26 00:10:14 crc kubenswrapper[4975]: E0126 00:10:14.113409 4975 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 26 00:10:14 crc kubenswrapper[4975]: E0126 00:10:14.113568 4975 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jbtcw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-n67bw_openshift-marketplace(034b4c5e-88c6-4dd7-a67d-bd9c23a75192): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 00:10:14 crc kubenswrapper[4975]: E0126 00:10:14.114922 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-n67bw" podUID="034b4c5e-88c6-4dd7-a67d-bd9c23a75192" Jan 26 00:10:14 crc kubenswrapper[4975]: E0126 00:10:14.116239 4975 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 26 00:10:14 crc kubenswrapper[4975]: E0126 00:10:14.116324 4975 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rg988,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-nh5hc_openshift-marketplace(77eccefb-3716-4e9d-9807-059400c1c934): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 00:10:14 crc kubenswrapper[4975]: E0126 00:10:14.119903 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-nh5hc" podUID="77eccefb-3716-4e9d-9807-059400c1c934" Jan 26 00:10:14 crc kubenswrapper[4975]: I0126 00:10:14.425491 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 26 00:10:14 crc kubenswrapper[4975]: I0126 00:10:14.428533 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 26 00:10:14 crc kubenswrapper[4975]: I0126 00:10:14.433318 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 26 00:10:14 crc kubenswrapper[4975]: I0126 00:10:14.483058 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/93092266-4d3b-451b-bf92-c033c3f62937-kubelet-dir\") pod \"installer-9-crc\" (UID: \"93092266-4d3b-451b-bf92-c033c3f62937\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 00:10:14 crc kubenswrapper[4975]: I0126 00:10:14.483095 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/93092266-4d3b-451b-bf92-c033c3f62937-var-lock\") pod \"installer-9-crc\" (UID: \"93092266-4d3b-451b-bf92-c033c3f62937\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 00:10:14 crc kubenswrapper[4975]: I0126 00:10:14.483136 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/93092266-4d3b-451b-bf92-c033c3f62937-kube-api-access\") pod \"installer-9-crc\" (UID: \"93092266-4d3b-451b-bf92-c033c3f62937\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 00:10:14 crc kubenswrapper[4975]: I0126 00:10:14.584310 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/93092266-4d3b-451b-bf92-c033c3f62937-kubelet-dir\") pod \"installer-9-crc\" (UID: \"93092266-4d3b-451b-bf92-c033c3f62937\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 00:10:14 crc kubenswrapper[4975]: I0126 00:10:14.584361 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/93092266-4d3b-451b-bf92-c033c3f62937-var-lock\") pod \"installer-9-crc\" (UID: \"93092266-4d3b-451b-bf92-c033c3f62937\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 00:10:14 crc kubenswrapper[4975]: I0126 00:10:14.584436 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/93092266-4d3b-451b-bf92-c033c3f62937-kubelet-dir\") pod \"installer-9-crc\" (UID: \"93092266-4d3b-451b-bf92-c033c3f62937\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 00:10:14 crc kubenswrapper[4975]: I0126 00:10:14.584460 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/93092266-4d3b-451b-bf92-c033c3f62937-kube-api-access\") pod \"installer-9-crc\" (UID: \"93092266-4d3b-451b-bf92-c033c3f62937\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 00:10:14 crc kubenswrapper[4975]: I0126 00:10:14.584506 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/93092266-4d3b-451b-bf92-c033c3f62937-var-lock\") pod \"installer-9-crc\" (UID: \"93092266-4d3b-451b-bf92-c033c3f62937\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 00:10:14 crc kubenswrapper[4975]: I0126 00:10:14.603552 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/93092266-4d3b-451b-bf92-c033c3f62937-kube-api-access\") pod \"installer-9-crc\" (UID: 
\"93092266-4d3b-451b-bf92-c033c3f62937\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 26 00:10:14 crc kubenswrapper[4975]: I0126 00:10:14.757083 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 26 00:10:15 crc kubenswrapper[4975]: E0126 00:10:15.602233 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-nh5hc" podUID="77eccefb-3716-4e9d-9807-059400c1c934" Jan 26 00:10:15 crc kubenswrapper[4975]: E0126 00:10:15.602417 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-n67bw" podUID="034b4c5e-88c6-4dd7-a67d-bd9c23a75192" Jan 26 00:10:15 crc kubenswrapper[4975]: E0126 00:10:15.676223 4975 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 26 00:10:15 crc kubenswrapper[4975]: E0126 00:10:15.676398 4975 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s92qd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-mmf4j_openshift-marketplace(d2aae40a-ded6-40de-a541-f22ef90f71e5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 00:10:15 crc kubenswrapper[4975]: E0126 00:10:15.677571 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying 
config: context canceled\"" pod="openshift-marketplace/certified-operators-mmf4j" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" Jan 26 00:10:15 crc kubenswrapper[4975]: E0126 00:10:15.689147 4975 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 26 00:10:15 crc kubenswrapper[4975]: E0126 00:10:15.689339 4975 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dsn6v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-lx8th_openshift-marketplace(75c99291-b46b-4f76-9922-cd530cca51c9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 00:10:15 crc kubenswrapper[4975]: E0126 00:10:15.690867 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-lx8th" podUID="75c99291-b46b-4f76-9922-cd530cca51c9" Jan 26 00:10:15 crc kubenswrapper[4975]: E0126 00:10:15.710215 4975 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 26 00:10:15 crc kubenswrapper[4975]: E0126 00:10:15.710349 4975 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7fsqr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-fccwg_openshift-marketplace(27ed45eb-a90d-4bd5-8a17-8988f53407aa): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 00:10:15 crc kubenswrapper[4975]: E0126 00:10:15.711556 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-fccwg" podUID="27ed45eb-a90d-4bd5-8a17-8988f53407aa" Jan 26 00:10:16 crc kubenswrapper[4975]: E0126 00:10:16.842758 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-mmf4j" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" Jan 26 00:10:16 crc kubenswrapper[4975]: E0126 00:10:16.842902 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-fccwg" podUID="27ed45eb-a90d-4bd5-8a17-8988f53407aa" Jan 26 00:10:16 crc kubenswrapper[4975]: E0126 00:10:16.842896 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-lx8th" podUID="75c99291-b46b-4f76-9922-cd530cca51c9" Jan 26 00:10:16 crc kubenswrapper[4975]: E0126 00:10:16.916301 4975 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 26 00:10:16 crc kubenswrapper[4975]: E0126 00:10:16.916585 4975 kuberuntime_manager.go:1274] 
"Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-z84m2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-hmnjx_openshift-marketplace(927dc4e6-f4c4-497b-92d6-3218ab0794ac): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 00:10:16 crc kubenswrapper[4975]: E0126 00:10:16.916934 4975 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 26 00:10:16 crc kubenswrapper[4975]: E0126 00:10:16.917092 4975 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jv9q8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-7g6z6_openshift-marketplace(518ddce4-70cd-4aca-a096-37237d16dd76): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 00:10:16 crc kubenswrapper[4975]: E0126 00:10:16.919059 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-7g6z6" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" Jan 26 00:10:16 crc kubenswrapper[4975]: E0126 00:10:16.920707 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-hmnjx" podUID="927dc4e6-f4c4-497b-92d6-3218ab0794ac" Jan 26 00:10:16 crc kubenswrapper[4975]: E0126 00:10:16.972335 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-7g6z6" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" Jan 26 00:10:16 crc kubenswrapper[4975]: E0126 00:10:16.972474 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-hmnjx" podUID="927dc4e6-f4c4-497b-92d6-3218ab0794ac" Jan 26 00:10:17 crc kubenswrapper[4975]: I0126 00:10:17.200326 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 26 00:10:17 crc kubenswrapper[4975]: I0126 00:10:17.355538 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 26 00:10:17 crc kubenswrapper[4975]: I0126 
00:10:17.974642 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"93092266-4d3b-451b-bf92-c033c3f62937","Type":"ContainerStarted","Data":"7a35b177fe070cdeaf5f35e775496dbc63a8ca195e51c53730b5785a30ab3e23"} Jan 26 00:10:17 crc kubenswrapper[4975]: I0126 00:10:17.976021 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"1afa53e5-50fd-4fa7-95cf-3ed3f42d9910","Type":"ContainerStarted","Data":"b0a42c6665d1bf1833b1b2a50d3b02f5b85d66c1eaadbff6f9813e17204598ae"} Jan 26 00:10:17 crc kubenswrapper[4975]: I0126 00:10:17.978309 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-s459q" event={"ID":"99d35071-9f6d-45df-841f-fd49ea0550c1","Type":"ContainerStarted","Data":"11b2ebfd1de5e8093cd8d21e6fd1a79136b729be34d580629873efdf0da9085f"} Jan 26 00:10:17 crc kubenswrapper[4975]: I0126 00:10:17.997045 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-s459q" podStartSLOduration=182.997028521 podStartE2EDuration="3m2.997028521s" podCreationTimestamp="2026-01-26 00:07:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:10:17.995698125 +0000 UTC m=+202.116903619" watchObservedRunningTime="2026-01-26 00:10:17.997028521 +0000 UTC m=+202.118234015" Jan 26 00:10:18 crc kubenswrapper[4975]: I0126 00:10:18.985521 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"93092266-4d3b-451b-bf92-c033c3f62937","Type":"ContainerStarted","Data":"99a42aad561e716756033854100915474cdfd124b56037b58dbec999fcde86d6"} Jan 26 00:10:18 crc kubenswrapper[4975]: I0126 00:10:18.988197 4975 generic.go:334] "Generic (PLEG): container finished" podID="1afa53e5-50fd-4fa7-95cf-3ed3f42d9910" containerID="31d9c0d7a8ae025cf5f4d10f9fd1675e57b84d13bb61eeb1b1855f2359f94a39" exitCode=0 Jan 26 00:10:18 crc kubenswrapper[4975]: I0126 00:10:18.988338 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"1afa53e5-50fd-4fa7-95cf-3ed3f42d9910","Type":"ContainerDied","Data":"31d9c0d7a8ae025cf5f4d10f9fd1675e57b84d13bb61eeb1b1855f2359f94a39"} Jan 26 00:10:19 crc kubenswrapper[4975]: I0126 00:10:19.006749 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=5.006705944 podStartE2EDuration="5.006705944s" podCreationTimestamp="2026-01-26 00:10:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:10:19.001179675 +0000 UTC m=+203.122385179" watchObservedRunningTime="2026-01-26 00:10:19.006705944 +0000 UTC m=+203.127911438" Jan 26 00:10:20 crc kubenswrapper[4975]: I0126 00:10:20.280170 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 00:10:20 crc kubenswrapper[4975]: I0126 00:10:20.370949 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1afa53e5-50fd-4fa7-95cf-3ed3f42d9910-kubelet-dir\") pod \"1afa53e5-50fd-4fa7-95cf-3ed3f42d9910\" (UID: \"1afa53e5-50fd-4fa7-95cf-3ed3f42d9910\") " Jan 26 00:10:20 crc kubenswrapper[4975]: I0126 00:10:20.371148 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1afa53e5-50fd-4fa7-95cf-3ed3f42d9910-kube-api-access\") pod \"1afa53e5-50fd-4fa7-95cf-3ed3f42d9910\" (UID: \"1afa53e5-50fd-4fa7-95cf-3ed3f42d9910\") " Jan 26 00:10:20 crc kubenswrapper[4975]: I0126 00:10:20.371845 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1afa53e5-50fd-4fa7-95cf-3ed3f42d9910-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "1afa53e5-50fd-4fa7-95cf-3ed3f42d9910" (UID: "1afa53e5-50fd-4fa7-95cf-3ed3f42d9910"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:10:20 crc kubenswrapper[4975]: I0126 00:10:20.376855 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1afa53e5-50fd-4fa7-95cf-3ed3f42d9910-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1afa53e5-50fd-4fa7-95cf-3ed3f42d9910" (UID: "1afa53e5-50fd-4fa7-95cf-3ed3f42d9910"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:10:20 crc kubenswrapper[4975]: I0126 00:10:20.473340 4975 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1afa53e5-50fd-4fa7-95cf-3ed3f42d9910-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 26 00:10:20 crc kubenswrapper[4975]: I0126 00:10:20.473378 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1afa53e5-50fd-4fa7-95cf-3ed3f42d9910-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 26 00:10:21 crc kubenswrapper[4975]: I0126 00:10:20.999934 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"1afa53e5-50fd-4fa7-95cf-3ed3f42d9910","Type":"ContainerDied","Data":"b0a42c6665d1bf1833b1b2a50d3b02f5b85d66c1eaadbff6f9813e17204598ae"} Jan 26 00:10:21 crc kubenswrapper[4975]: I0126 00:10:20.999973 4975 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b0a42c6665d1bf1833b1b2a50d3b02f5b85d66c1eaadbff6f9813e17204598ae" Jan 26 00:10:21 crc kubenswrapper[4975]: I0126 00:10:20.999991 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 26 00:10:36 crc kubenswrapper[4975]: I0126 00:10:36.247828 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4b4kw" event={"ID":"af99b29a-8baa-405f-a1f1-84116ba167a8","Type":"ContainerStarted","Data":"f45ffb0e6ed2357590570d0c4909dd7fcf20309a12d96eb94bc506f6b0bfac74"} Jan 26 00:10:37 crc kubenswrapper[4975]: I0126 00:10:37.254897 4975 generic.go:334] "Generic (PLEG): container finished" podID="af99b29a-8baa-405f-a1f1-84116ba167a8" containerID="f45ffb0e6ed2357590570d0c4909dd7fcf20309a12d96eb94bc506f6b0bfac74" exitCode=0 Jan 26 00:10:37 crc kubenswrapper[4975]: I0126 00:10:37.255044 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4b4kw" event={"ID":"af99b29a-8baa-405f-a1f1-84116ba167a8","Type":"ContainerDied","Data":"f45ffb0e6ed2357590570d0c4909dd7fcf20309a12d96eb94bc506f6b0bfac74"} Jan 26 00:10:40 crc kubenswrapper[4975]: I0126 00:10:40.481747 4975 patch_prober.go:28] interesting pod/machine-config-daemon-f42fk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 00:10:40 crc kubenswrapper[4975]: I0126 00:10:40.482108 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 00:10:40 crc kubenswrapper[4975]: I0126 00:10:40.482160 4975 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" Jan 26 00:10:40 crc kubenswrapper[4975]: I0126 00:10:40.482726 4975 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283"} pod="openshift-machine-config-operator/machine-config-daemon-f42fk" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 26 00:10:40 crc kubenswrapper[4975]: I0126 00:10:40.482839 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" containerID="cri-o://33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283" gracePeriod=600 Jan 26 00:10:41 crc kubenswrapper[4975]: I0126 00:10:41.284956 4975 generic.go:334] "Generic (PLEG): container finished" podID="518ddce4-70cd-4aca-a096-37237d16dd76" containerID="a85dc64d3bdddd4709c9363859916bcd077d0d68f731164ca9055c4102a27357" exitCode=0 Jan 26 00:10:41 crc kubenswrapper[4975]: I0126 00:10:41.285029 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7g6z6" event={"ID":"518ddce4-70cd-4aca-a096-37237d16dd76","Type":"ContainerDied","Data":"a85dc64d3bdddd4709c9363859916bcd077d0d68f731164ca9055c4102a27357"} Jan 26 00:10:41 crc kubenswrapper[4975]: I0126 00:10:41.290349 4975 generic.go:334] "Generic (PLEG): container finished" podID="927dc4e6-f4c4-497b-92d6-3218ab0794ac" 
containerID="288673d59a5898c476b90ebeee1b96738621c584fb415b02d9a920f9daeb78c5" exitCode=0 Jan 26 00:10:41 crc kubenswrapper[4975]: I0126 00:10:41.290424 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hmnjx" event={"ID":"927dc4e6-f4c4-497b-92d6-3218ab0794ac","Type":"ContainerDied","Data":"288673d59a5898c476b90ebeee1b96738621c584fb415b02d9a920f9daeb78c5"} Jan 26 00:10:41 crc kubenswrapper[4975]: I0126 00:10:41.294665 4975 generic.go:334] "Generic (PLEG): container finished" podID="d2aae40a-ded6-40de-a541-f22ef90f71e5" containerID="4aefc0594555d85e74476c0312961e8d874048c7b613e5fdf398080635ad2630" exitCode=0 Jan 26 00:10:41 crc kubenswrapper[4975]: I0126 00:10:41.294761 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mmf4j" event={"ID":"d2aae40a-ded6-40de-a541-f22ef90f71e5","Type":"ContainerDied","Data":"4aefc0594555d85e74476c0312961e8d874048c7b613e5fdf398080635ad2630"} Jan 26 00:10:41 crc kubenswrapper[4975]: I0126 00:10:41.299756 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n67bw" event={"ID":"034b4c5e-88c6-4dd7-a67d-bd9c23a75192","Type":"ContainerStarted","Data":"65ffe73db50818555831dbbefa69b6c19645067dadd859e1c23c3c95cbc6814b"} Jan 26 00:10:41 crc kubenswrapper[4975]: I0126 00:10:41.302339 4975 generic.go:334] "Generic (PLEG): container finished" podID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerID="33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283" exitCode=0 Jan 26 00:10:41 crc kubenswrapper[4975]: I0126 00:10:41.302427 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" event={"ID":"b76c31fb-14ea-4b49-8a41-0b2731967b86","Type":"ContainerDied","Data":"33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283"} Jan 26 00:10:41 crc kubenswrapper[4975]: I0126 00:10:41.302457 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" event={"ID":"b76c31fb-14ea-4b49-8a41-0b2731967b86","Type":"ContainerStarted","Data":"652e6479f3dfdb1a3ca63514acfe0bfbaff4a6a65fef79d6dec5db4086c1400e"} Jan 26 00:10:41 crc kubenswrapper[4975]: I0126 00:10:41.309863 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4b4kw" event={"ID":"af99b29a-8baa-405f-a1f1-84116ba167a8","Type":"ContainerStarted","Data":"46f383b39252462d9dbe51adc4566b94ee627ee5b6ce6db3a9542df9e8c16818"} Jan 26 00:10:41 crc kubenswrapper[4975]: I0126 00:10:41.312184 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nh5hc" event={"ID":"77eccefb-3716-4e9d-9807-059400c1c934","Type":"ContainerStarted","Data":"865173e4a5399f41ba223e268caafca7636b8ea8c45a613a50b98f10539b01b1"} Jan 26 00:10:41 crc kubenswrapper[4975]: I0126 00:10:41.320985 4975 generic.go:334] "Generic (PLEG): container finished" podID="27ed45eb-a90d-4bd5-8a17-8988f53407aa" containerID="dc1c15f4a9f17edca85ab8e22a42d0ee9e039a584113f28e911956da87b4cbdf" exitCode=0 Jan 26 00:10:41 crc kubenswrapper[4975]: I0126 00:10:41.321334 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fccwg" event={"ID":"27ed45eb-a90d-4bd5-8a17-8988f53407aa","Type":"ContainerDied","Data":"dc1c15f4a9f17edca85ab8e22a42d0ee9e039a584113f28e911956da87b4cbdf"} Jan 26 00:10:41 crc kubenswrapper[4975]: I0126 00:10:41.333693 4975 generic.go:334] "Generic 
(PLEG): container finished" podID="75c99291-b46b-4f76-9922-cd530cca51c9" containerID="17384369c8e019fbf81142ad4b744e8e76ad7cfe322098de76f9b4ebd6c716a9" exitCode=0 Jan 26 00:10:41 crc kubenswrapper[4975]: I0126 00:10:41.333767 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lx8th" event={"ID":"75c99291-b46b-4f76-9922-cd530cca51c9","Type":"ContainerDied","Data":"17384369c8e019fbf81142ad4b744e8e76ad7cfe322098de76f9b4ebd6c716a9"} Jan 26 00:10:41 crc kubenswrapper[4975]: I0126 00:10:41.461477 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4b4kw" podStartSLOduration=4.445660692 podStartE2EDuration="1m13.461455545s" podCreationTimestamp="2026-01-26 00:09:28 +0000 UTC" firstStartedPulling="2026-01-26 00:09:31.215127582 +0000 UTC m=+155.336333066" lastFinishedPulling="2026-01-26 00:10:40.230922385 +0000 UTC m=+224.352127919" observedRunningTime="2026-01-26 00:10:41.456189484 +0000 UTC m=+225.577394978" watchObservedRunningTime="2026-01-26 00:10:41.461455545 +0000 UTC m=+225.582661039" Jan 26 00:10:42 crc kubenswrapper[4975]: I0126 00:10:42.341838 4975 generic.go:334] "Generic (PLEG): container finished" podID="034b4c5e-88c6-4dd7-a67d-bd9c23a75192" containerID="65ffe73db50818555831dbbefa69b6c19645067dadd859e1c23c3c95cbc6814b" exitCode=0 Jan 26 00:10:42 crc kubenswrapper[4975]: I0126 00:10:42.341887 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n67bw" event={"ID":"034b4c5e-88c6-4dd7-a67d-bd9c23a75192","Type":"ContainerDied","Data":"65ffe73db50818555831dbbefa69b6c19645067dadd859e1c23c3c95cbc6814b"} Jan 26 00:10:42 crc kubenswrapper[4975]: I0126 00:10:42.344002 4975 generic.go:334] "Generic (PLEG): container finished" podID="77eccefb-3716-4e9d-9807-059400c1c934" containerID="865173e4a5399f41ba223e268caafca7636b8ea8c45a613a50b98f10539b01b1" exitCode=0 Jan 26 00:10:42 crc kubenswrapper[4975]: I0126 00:10:42.344344 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nh5hc" event={"ID":"77eccefb-3716-4e9d-9807-059400c1c934","Type":"ContainerDied","Data":"865173e4a5399f41ba223e268caafca7636b8ea8c45a613a50b98f10539b01b1"} Jan 26 00:10:43 crc kubenswrapper[4975]: I0126 00:10:43.351830 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hmnjx" event={"ID":"927dc4e6-f4c4-497b-92d6-3218ab0794ac","Type":"ContainerStarted","Data":"dc85e71dd13ca42d02ca9ff25cb101f5c429502ae865b29e85a83e004f3f5059"} Jan 26 00:10:43 crc kubenswrapper[4975]: I0126 00:10:43.374052 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-hmnjx" podStartSLOduration=3.341565139 podStartE2EDuration="1m13.374032624s" podCreationTimestamp="2026-01-26 00:09:30 +0000 UTC" firstStartedPulling="2026-01-26 00:09:32.235764067 +0000 UTC m=+156.356969561" lastFinishedPulling="2026-01-26 00:10:42.268231552 +0000 UTC m=+226.389437046" observedRunningTime="2026-01-26 00:10:43.368895726 +0000 UTC m=+227.490101240" watchObservedRunningTime="2026-01-26 00:10:43.374032624 +0000 UTC m=+227.495238108" Jan 26 00:10:44 crc kubenswrapper[4975]: I0126 00:10:44.364391 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fccwg" event={"ID":"27ed45eb-a90d-4bd5-8a17-8988f53407aa","Type":"ContainerStarted","Data":"170e622f13df7bbdb96f316809c498ecb5dc65fc6f3f604b9ec297f5a2dd331e"} Jan 26 
00:10:44 crc kubenswrapper[4975]: I0126 00:10:44.368806 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lx8th" event={"ID":"75c99291-b46b-4f76-9922-cd530cca51c9","Type":"ContainerStarted","Data":"d1a0c2da1197cd3e91d54bb1f69b1afdd279bc0b3cbe375736fb14a0e00df2cb"} Jan 26 00:10:44 crc kubenswrapper[4975]: I0126 00:10:44.375585 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7g6z6" event={"ID":"518ddce4-70cd-4aca-a096-37237d16dd76","Type":"ContainerStarted","Data":"ea7dd52c1c5e1bc10491603f99dce7377bae013ffbf214c456d53d624e42637c"} Jan 26 00:10:44 crc kubenswrapper[4975]: I0126 00:10:44.378253 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mmf4j" event={"ID":"d2aae40a-ded6-40de-a541-f22ef90f71e5","Type":"ContainerStarted","Data":"524896459d9791f303d686b1900a7057a3409dbde233d6ea277cd3cd023ded53"} Jan 26 00:10:44 crc kubenswrapper[4975]: I0126 00:10:44.388227 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fccwg" podStartSLOduration=3.885691254 podStartE2EDuration="1m16.388199907s" podCreationTimestamp="2026-01-26 00:09:28 +0000 UTC" firstStartedPulling="2026-01-26 00:09:31.211898868 +0000 UTC m=+155.333104362" lastFinishedPulling="2026-01-26 00:10:43.714407531 +0000 UTC m=+227.835613015" observedRunningTime="2026-01-26 00:10:44.387706284 +0000 UTC m=+228.508911778" watchObservedRunningTime="2026-01-26 00:10:44.388199907 +0000 UTC m=+228.509405401" Jan 26 00:10:44 crc kubenswrapper[4975]: I0126 00:10:44.406489 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-lx8th" podStartSLOduration=4.11896644 podStartE2EDuration="1m16.406469218s" podCreationTimestamp="2026-01-26 00:09:28 +0000 UTC" firstStartedPulling="2026-01-26 00:09:30.17507708 +0000 UTC m=+154.296282574" lastFinishedPulling="2026-01-26 00:10:42.462579858 +0000 UTC m=+226.583785352" observedRunningTime="2026-01-26 00:10:44.404350571 +0000 UTC m=+228.525556065" watchObservedRunningTime="2026-01-26 00:10:44.406469218 +0000 UTC m=+228.527674712" Jan 26 00:10:44 crc kubenswrapper[4975]: I0126 00:10:44.429720 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mmf4j" podStartSLOduration=2.652513845 podStartE2EDuration="1m15.429694741s" podCreationTimestamp="2026-01-26 00:09:29 +0000 UTC" firstStartedPulling="2026-01-26 00:09:31.211791385 +0000 UTC m=+155.332996879" lastFinishedPulling="2026-01-26 00:10:43.988972281 +0000 UTC m=+228.110177775" observedRunningTime="2026-01-26 00:10:44.424227894 +0000 UTC m=+228.545433378" watchObservedRunningTime="2026-01-26 00:10:44.429694741 +0000 UTC m=+228.550900235" Jan 26 00:10:44 crc kubenswrapper[4975]: I0126 00:10:44.447689 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7g6z6" podStartSLOduration=4.248076867 podStartE2EDuration="1m14.447664014s" podCreationTimestamp="2026-01-26 00:09:30 +0000 UTC" firstStartedPulling="2026-01-26 00:09:32.232238396 +0000 UTC m=+156.353443890" lastFinishedPulling="2026-01-26 00:10:42.431825523 +0000 UTC m=+226.553031037" observedRunningTime="2026-01-26 00:10:44.443528442 +0000 UTC m=+228.564733936" watchObservedRunningTime="2026-01-26 00:10:44.447664014 +0000 UTC m=+228.568869508" Jan 26 00:10:45 crc kubenswrapper[4975]: I0126 
00:10:45.387206 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nh5hc" event={"ID":"77eccefb-3716-4e9d-9807-059400c1c934","Type":"ContainerStarted","Data":"ae620f14657f124f669ea0e6a99b5740180e132f4742fea45c6637024057edfa"} Jan 26 00:10:45 crc kubenswrapper[4975]: I0126 00:10:45.390525 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n67bw" event={"ID":"034b4c5e-88c6-4dd7-a67d-bd9c23a75192","Type":"ContainerStarted","Data":"ffe0da6a1a6506293c1e3556fe17620c5845c3a7e92b70358e5b163b7a1a7ce2"} Jan 26 00:10:45 crc kubenswrapper[4975]: I0126 00:10:45.404345 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nh5hc" podStartSLOduration=3.470898227 podStartE2EDuration="1m13.404329092s" podCreationTimestamp="2026-01-26 00:09:32 +0000 UTC" firstStartedPulling="2026-01-26 00:09:34.362897726 +0000 UTC m=+158.484103220" lastFinishedPulling="2026-01-26 00:10:44.296328601 +0000 UTC m=+228.417534085" observedRunningTime="2026-01-26 00:10:45.402816642 +0000 UTC m=+229.524022136" watchObservedRunningTime="2026-01-26 00:10:45.404329092 +0000 UTC m=+229.525534586" Jan 26 00:10:45 crc kubenswrapper[4975]: I0126 00:10:45.424839 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-n67bw" podStartSLOduration=3.66000398 podStartE2EDuration="1m14.424822542s" podCreationTimestamp="2026-01-26 00:09:31 +0000 UTC" firstStartedPulling="2026-01-26 00:09:33.318787519 +0000 UTC m=+157.439993013" lastFinishedPulling="2026-01-26 00:10:44.083606091 +0000 UTC m=+228.204811575" observedRunningTime="2026-01-26 00:10:45.419428998 +0000 UTC m=+229.540634492" watchObservedRunningTime="2026-01-26 00:10:45.424822542 +0000 UTC m=+229.546028036" Jan 26 00:10:48 crc kubenswrapper[4975]: I0126 00:10:48.817576 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-lx8th" Jan 26 00:10:48 crc kubenswrapper[4975]: I0126 00:10:48.818066 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-lx8th" Jan 26 00:10:48 crc kubenswrapper[4975]: I0126 00:10:48.950682 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-lx8th" Jan 26 00:10:49 crc kubenswrapper[4975]: I0126 00:10:49.034281 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fccwg" Jan 26 00:10:49 crc kubenswrapper[4975]: I0126 00:10:49.034332 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fccwg" Jan 26 00:10:49 crc kubenswrapper[4975]: I0126 00:10:49.114363 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fccwg" Jan 26 00:10:49 crc kubenswrapper[4975]: I0126 00:10:49.298557 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4b4kw" Jan 26 00:10:49 crc kubenswrapper[4975]: I0126 00:10:49.298611 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4b4kw" Jan 26 00:10:49 crc kubenswrapper[4975]: I0126 00:10:49.365006 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/community-operators-4b4kw" Jan 26 00:10:49 crc kubenswrapper[4975]: I0126 00:10:49.461150 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mmf4j" Jan 26 00:10:49 crc kubenswrapper[4975]: I0126 00:10:49.461218 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mmf4j" Jan 26 00:10:49 crc kubenswrapper[4975]: I0126 00:10:49.502146 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mmf4j" Jan 26 00:10:49 crc kubenswrapper[4975]: I0126 00:10:49.584098 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4b4kw" Jan 26 00:10:49 crc kubenswrapper[4975]: I0126 00:10:49.598111 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-lx8th" Jan 26 00:10:49 crc kubenswrapper[4975]: I0126 00:10:49.602019 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fccwg" Jan 26 00:10:50 crc kubenswrapper[4975]: I0126 00:10:50.464306 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mmf4j" Jan 26 00:10:50 crc kubenswrapper[4975]: I0126 00:10:50.816304 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xrnhz"] Jan 26 00:10:50 crc kubenswrapper[4975]: I0126 00:10:50.958526 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-hmnjx" Jan 26 00:10:50 crc kubenswrapper[4975]: I0126 00:10:50.959169 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-hmnjx" Jan 26 00:10:51 crc kubenswrapper[4975]: I0126 00:10:51.014249 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-hmnjx" Jan 26 00:10:51 crc kubenswrapper[4975]: I0126 00:10:51.253541 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7g6z6" Jan 26 00:10:51 crc kubenswrapper[4975]: I0126 00:10:51.255020 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7g6z6" Jan 26 00:10:51 crc kubenswrapper[4975]: I0126 00:10:51.339816 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7g6z6" Jan 26 00:10:51 crc kubenswrapper[4975]: I0126 00:10:51.592802 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-hmnjx" Jan 26 00:10:51 crc kubenswrapper[4975]: I0126 00:10:51.609371 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7g6z6" Jan 26 00:10:51 crc kubenswrapper[4975]: I0126 00:10:51.721813 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4b4kw"] Jan 26 00:10:51 crc kubenswrapper[4975]: I0126 00:10:51.722087 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4b4kw" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" containerName="registry-server" 
containerID="cri-o://46f383b39252462d9dbe51adc4566b94ee627ee5b6ce6db3a9542df9e8c16818" gracePeriod=2 Jan 26 00:10:52 crc kubenswrapper[4975]: I0126 00:10:52.051788 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-n67bw" Jan 26 00:10:52 crc kubenswrapper[4975]: I0126 00:10:52.052147 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-n67bw" Jan 26 00:10:52 crc kubenswrapper[4975]: I0126 00:10:52.235086 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-n67bw" Jan 26 00:10:52 crc kubenswrapper[4975]: I0126 00:10:52.452415 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nh5hc" Jan 26 00:10:52 crc kubenswrapper[4975]: I0126 00:10:52.452473 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nh5hc" Jan 26 00:10:52 crc kubenswrapper[4975]: I0126 00:10:52.492867 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-n67bw" Jan 26 00:10:52 crc kubenswrapper[4975]: I0126 00:10:52.497439 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nh5hc" Jan 26 00:10:53 crc kubenswrapper[4975]: I0126 00:10:53.482650 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nh5hc" Jan 26 00:10:53 crc kubenswrapper[4975]: I0126 00:10:53.523859 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mmf4j"] Jan 26 00:10:53 crc kubenswrapper[4975]: I0126 00:10:53.524155 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mmf4j" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" containerName="registry-server" containerID="cri-o://524896459d9791f303d686b1900a7057a3409dbde233d6ea277cd3cd023ded53" gracePeriod=2 Jan 26 00:10:54 crc kubenswrapper[4975]: I0126 00:10:54.123691 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7g6z6"] Jan 26 00:10:54 crc kubenswrapper[4975]: I0126 00:10:54.447907 4975 generic.go:334] "Generic (PLEG): container finished" podID="af99b29a-8baa-405f-a1f1-84116ba167a8" containerID="46f383b39252462d9dbe51adc4566b94ee627ee5b6ce6db3a9542df9e8c16818" exitCode=0 Jan 26 00:10:54 crc kubenswrapper[4975]: I0126 00:10:54.448025 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4b4kw" event={"ID":"af99b29a-8baa-405f-a1f1-84116ba167a8","Type":"ContainerDied","Data":"46f383b39252462d9dbe51adc4566b94ee627ee5b6ce6db3a9542df9e8c16818"} Jan 26 00:10:54 crc kubenswrapper[4975]: I0126 00:10:54.451525 4975 generic.go:334] "Generic (PLEG): container finished" podID="d2aae40a-ded6-40de-a541-f22ef90f71e5" containerID="524896459d9791f303d686b1900a7057a3409dbde233d6ea277cd3cd023ded53" exitCode=0 Jan 26 00:10:54 crc kubenswrapper[4975]: I0126 00:10:54.451575 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mmf4j" event={"ID":"d2aae40a-ded6-40de-a541-f22ef90f71e5","Type":"ContainerDied","Data":"524896459d9791f303d686b1900a7057a3409dbde233d6ea277cd3cd023ded53"} Jan 26 00:10:54 crc kubenswrapper[4975]: I0126 00:10:54.451802 4975 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7g6z6" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" containerName="registry-server" containerID="cri-o://ea7dd52c1c5e1bc10491603f99dce7377bae013ffbf214c456d53d624e42637c" gracePeriod=2 Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.157646 4975 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 26 00:10:56 crc kubenswrapper[4975]: E0126 00:10:56.158157 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1afa53e5-50fd-4fa7-95cf-3ed3f42d9910" containerName="pruner" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.158169 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="1afa53e5-50fd-4fa7-95cf-3ed3f42d9910" containerName="pruner" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.158369 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="1afa53e5-50fd-4fa7-95cf-3ed3f42d9910" containerName="pruner" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.158922 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.159169 4975 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.159801 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e" gracePeriod=15 Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.159856 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0" gracePeriod=15 Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.159828 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728" gracePeriod=15 Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.159830 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210" gracePeriod=15 Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.159795 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5" gracePeriod=15 Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.161004 4975 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 26 00:10:56 crc kubenswrapper[4975]: E0126 00:10:56.161237 
4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.161248 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 26 00:10:56 crc kubenswrapper[4975]: E0126 00:10:56.161255 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.161261 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 26 00:10:56 crc kubenswrapper[4975]: E0126 00:10:56.161277 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.161283 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 26 00:10:56 crc kubenswrapper[4975]: E0126 00:10:56.161290 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.161296 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 26 00:10:56 crc kubenswrapper[4975]: E0126 00:10:56.161311 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.161317 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 26 00:10:56 crc kubenswrapper[4975]: E0126 00:10:56.161328 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.161333 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.161438 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.161450 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.161457 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.161463 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.161473 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 26 00:10:56 crc kubenswrapper[4975]: E0126 00:10:56.161580 4975 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.161587 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.162661 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.213608 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.258644 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.258686 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.258707 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.258744 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.258782 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.258813 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.258833 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.258860 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.359681 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.359910 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.360008 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.359810 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.360158 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.360219 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.360303 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.360403 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.360502 4975 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.360566 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.360515 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.360436 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.360692 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.360931 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.360991 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.361109 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.520491 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nh5hc"] Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.520773 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nh5hc" podUID="77eccefb-3716-4e9d-9807-059400c1c934" containerName="registry-server" 
containerID="cri-o://ae620f14657f124f669ea0e6a99b5740180e132f4742fea45c6637024057edfa" gracePeriod=2 Jan 26 00:10:56 crc kubenswrapper[4975]: I0126 00:10:56.543914 4975 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Jan 26 00:10:57 crc kubenswrapper[4975]: I0126 00:10:57.585549 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 00:10:57 crc kubenswrapper[4975]: W0126 00:10:57.674603 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-e7027fc4c0201f3b1b4c353e82a6a980027961144ae149614696ef6aff7a1be0 WatchSource:0}: Error finding container e7027fc4c0201f3b1b4c353e82a6a980027961144ae149614696ef6aff7a1be0: Status 404 returned error can't find the container with id e7027fc4c0201f3b1b4c353e82a6a980027961144ae149614696ef6aff7a1be0 Jan 26 00:10:57 crc kubenswrapper[4975]: E0126 00:10:57.699245 4975 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.193:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188e1f6d40922b4b openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-26 00:10:57.698401099 +0000 UTC m=+241.819606593,LastTimestamp:2026-01-26 00:10:57.698401099 +0000 UTC m=+241.819606593,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 26 00:10:57 crc kubenswrapper[4975]: I0126 00:10:57.703506 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mmf4j" Jan 26 00:10:57 crc kubenswrapper[4975]: I0126 00:10:57.704081 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:10:57 crc kubenswrapper[4975]: I0126 00:10:57.780339 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2aae40a-ded6-40de-a541-f22ef90f71e5-utilities\") pod \"d2aae40a-ded6-40de-a541-f22ef90f71e5\" (UID: \"d2aae40a-ded6-40de-a541-f22ef90f71e5\") " Jan 26 00:10:57 crc kubenswrapper[4975]: I0126 00:10:57.780395 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2aae40a-ded6-40de-a541-f22ef90f71e5-catalog-content\") pod \"d2aae40a-ded6-40de-a541-f22ef90f71e5\" (UID: \"d2aae40a-ded6-40de-a541-f22ef90f71e5\") " Jan 26 00:10:57 crc kubenswrapper[4975]: I0126 00:10:57.780418 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s92qd\" (UniqueName: \"kubernetes.io/projected/d2aae40a-ded6-40de-a541-f22ef90f71e5-kube-api-access-s92qd\") pod \"d2aae40a-ded6-40de-a541-f22ef90f71e5\" (UID: \"d2aae40a-ded6-40de-a541-f22ef90f71e5\") " Jan 26 00:10:57 crc kubenswrapper[4975]: I0126 00:10:57.781812 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2aae40a-ded6-40de-a541-f22ef90f71e5-utilities" (OuterVolumeSpecName: "utilities") pod "d2aae40a-ded6-40de-a541-f22ef90f71e5" (UID: "d2aae40a-ded6-40de-a541-f22ef90f71e5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:10:57 crc kubenswrapper[4975]: I0126 00:10:57.784673 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d2aae40a-ded6-40de-a541-f22ef90f71e5-kube-api-access-s92qd" (OuterVolumeSpecName: "kube-api-access-s92qd") pod "d2aae40a-ded6-40de-a541-f22ef90f71e5" (UID: "d2aae40a-ded6-40de-a541-f22ef90f71e5"). InnerVolumeSpecName "kube-api-access-s92qd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:10:57 crc kubenswrapper[4975]: I0126 00:10:57.881428 4975 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d2aae40a-ded6-40de-a541-f22ef90f71e5-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 00:10:57 crc kubenswrapper[4975]: I0126 00:10:57.881764 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s92qd\" (UniqueName: \"kubernetes.io/projected/d2aae40a-ded6-40de-a541-f22ef90f71e5-kube-api-access-s92qd\") on node \"crc\" DevicePath \"\"" Jan 26 00:10:58 crc kubenswrapper[4975]: E0126 00:10:58.532874 4975 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728.scope\": RecentStats: unable to find data in memory cache]" Jan 26 00:10:58 crc kubenswrapper[4975]: I0126 00:10:58.996441 4975 generic.go:334] "Generic (PLEG): container finished" podID="518ddce4-70cd-4aca-a096-37237d16dd76" containerID="ea7dd52c1c5e1bc10491603f99dce7377bae013ffbf214c456d53d624e42637c" exitCode=0 Jan 26 00:10:58 crc kubenswrapper[4975]: I0126 00:10:58.996619 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7g6z6" event={"ID":"518ddce4-70cd-4aca-a096-37237d16dd76","Type":"ContainerDied","Data":"ea7dd52c1c5e1bc10491603f99dce7377bae013ffbf214c456d53d624e42637c"} Jan 26 00:10:59 crc kubenswrapper[4975]: I0126 00:10:59.000356 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mmf4j" event={"ID":"d2aae40a-ded6-40de-a541-f22ef90f71e5","Type":"ContainerDied","Data":"e4206fb6373bb68ed40eb78deae9502889790049830617fb67a98412001375b8"} Jan 26 00:10:59 crc kubenswrapper[4975]: I0126 00:10:59.000427 4975 scope.go:117] "RemoveContainer" containerID="524896459d9791f303d686b1900a7057a3409dbde233d6ea277cd3cd023ded53" Jan 26 00:10:59 crc kubenswrapper[4975]: I0126 00:10:59.000384 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mmf4j" Jan 26 00:10:59 crc kubenswrapper[4975]: I0126 00:10:59.001369 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:10:59 crc kubenswrapper[4975]: I0126 00:10:59.001873 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"e7027fc4c0201f3b1b4c353e82a6a980027961144ae149614696ef6aff7a1be0"} Jan 26 00:10:59 crc kubenswrapper[4975]: I0126 00:10:59.018312 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4b4kw" Jan 26 00:10:59 crc kubenswrapper[4975]: I0126 00:10:59.019325 4975 status_manager.go:851] "Failed to get status for pod" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" pod="openshift-marketplace/community-operators-4b4kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4b4kw\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:10:59 crc kubenswrapper[4975]: I0126 00:10:59.019831 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:10:59 crc kubenswrapper[4975]: I0126 00:10:59.024318 4975 scope.go:117] "RemoveContainer" containerID="4aefc0594555d85e74476c0312961e8d874048c7b613e5fdf398080635ad2630" Jan 26 00:10:59 crc kubenswrapper[4975]: I0126 00:10:59.043306 4975 scope.go:117] "RemoveContainer" containerID="1dfa539b39512da0673c2c1ef3e9d9f5e7c70eb9a1c87f8af0da41ca3ebf2c57" Jan 26 00:10:59 crc kubenswrapper[4975]: I0126 00:10:59.102853 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af99b29a-8baa-405f-a1f1-84116ba167a8-catalog-content\") pod \"af99b29a-8baa-405f-a1f1-84116ba167a8\" (UID: \"af99b29a-8baa-405f-a1f1-84116ba167a8\") " Jan 26 00:10:59 crc kubenswrapper[4975]: I0126 00:10:59.102968 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af99b29a-8baa-405f-a1f1-84116ba167a8-utilities\") pod \"af99b29a-8baa-405f-a1f1-84116ba167a8\" (UID: \"af99b29a-8baa-405f-a1f1-84116ba167a8\") " Jan 26 00:10:59 crc kubenswrapper[4975]: I0126 00:10:59.103058 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cbtn4\" (UniqueName: \"kubernetes.io/projected/af99b29a-8baa-405f-a1f1-84116ba167a8-kube-api-access-cbtn4\") pod \"af99b29a-8baa-405f-a1f1-84116ba167a8\" (UID: \"af99b29a-8baa-405f-a1f1-84116ba167a8\") " Jan 26 00:10:59 crc kubenswrapper[4975]: I0126 00:10:59.104006 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af99b29a-8baa-405f-a1f1-84116ba167a8-utilities" (OuterVolumeSpecName: "utilities") pod "af99b29a-8baa-405f-a1f1-84116ba167a8" (UID: "af99b29a-8baa-405f-a1f1-84116ba167a8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:10:59 crc kubenswrapper[4975]: I0126 00:10:59.113758 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af99b29a-8baa-405f-a1f1-84116ba167a8-kube-api-access-cbtn4" (OuterVolumeSpecName: "kube-api-access-cbtn4") pod "af99b29a-8baa-405f-a1f1-84116ba167a8" (UID: "af99b29a-8baa-405f-a1f1-84116ba167a8"). InnerVolumeSpecName "kube-api-access-cbtn4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:10:59 crc kubenswrapper[4975]: I0126 00:10:59.205504 4975 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af99b29a-8baa-405f-a1f1-84116ba167a8-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 00:10:59 crc kubenswrapper[4975]: I0126 00:10:59.205558 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cbtn4\" (UniqueName: \"kubernetes.io/projected/af99b29a-8baa-405f-a1f1-84116ba167a8-kube-api-access-cbtn4\") on node \"crc\" DevicePath \"\"" Jan 26 00:10:59 crc kubenswrapper[4975]: I0126 00:10:59.273666 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af99b29a-8baa-405f-a1f1-84116ba167a8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "af99b29a-8baa-405f-a1f1-84116ba167a8" (UID: "af99b29a-8baa-405f-a1f1-84116ba167a8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:10:59 crc kubenswrapper[4975]: I0126 00:10:59.306687 4975 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af99b29a-8baa-405f-a1f1-84116ba167a8-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 00:10:59 crc kubenswrapper[4975]: I0126 00:10:59.761335 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d2aae40a-ded6-40de-a541-f22ef90f71e5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d2aae40a-ded6-40de-a541-f22ef90f71e5" (UID: "d2aae40a-ded6-40de-a541-f22ef90f71e5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:10:59 crc kubenswrapper[4975]: I0126 00:10:59.816884 4975 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d2aae40a-ded6-40de-a541-f22ef90f71e5-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 00:10:59 crc kubenswrapper[4975]: I0126 00:10:59.915072 4975 status_manager.go:851] "Failed to get status for pod" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" pod="openshift-marketplace/community-operators-4b4kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4b4kw\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:10:59 crc kubenswrapper[4975]: I0126 00:10:59.915335 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.009179 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4b4kw" event={"ID":"af99b29a-8baa-405f-a1f1-84116ba167a8","Type":"ContainerDied","Data":"bcbb87aa9e61296d3377987908fff7719189f190b7c73d06bcf0c5b1bb3d5c35"} Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.009248 4975 scope.go:117] "RemoveContainer" containerID="46f383b39252462d9dbe51adc4566b94ee627ee5b6ce6db3a9542df9e8c16818" Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.009265 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4b4kw" Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.010311 4975 status_manager.go:851] "Failed to get status for pod" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" pod="openshift-marketplace/community-operators-4b4kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4b4kw\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.010862 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.011016 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-nh5hc_77eccefb-3716-4e9d-9807-059400c1c934/registry-server/0.log" Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.011847 4975 generic.go:334] "Generic (PLEG): container finished" podID="77eccefb-3716-4e9d-9807-059400c1c934" containerID="ae620f14657f124f669ea0e6a99b5740180e132f4742fea45c6637024057edfa" exitCode=137 Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.011907 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nh5hc" event={"ID":"77eccefb-3716-4e9d-9807-059400c1c934","Type":"ContainerDied","Data":"ae620f14657f124f669ea0e6a99b5740180e132f4742fea45c6637024057edfa"} Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.013528 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.014794 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.015402 4975 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5" exitCode=0 Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.015424 4975 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0" exitCode=0 Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.015433 4975 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728" exitCode=2 Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.023678 4975 status_manager.go:851] "Failed to get status for pod" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" pod="openshift-marketplace/community-operators-4b4kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4b4kw\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.023885 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.027832 4975 scope.go:117] "RemoveContainer" containerID="f45ffb0e6ed2357590570d0c4909dd7fcf20309a12d96eb94bc506f6b0bfac74" Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.048240 4975 scope.go:117] "RemoveContainer" containerID="1f8497fee0a16151aa8b82259c9f2bbe80222175a533b596194e7dba5e0f47d4" Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.080851 4975 scope.go:117] "RemoveContainer" containerID="80a51f7737370936533e63ecf2c76142c346b432d5336a10d03a1a33abcc1c66" Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.374069 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7g6z6" Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.375090 4975 status_manager.go:851] "Failed to get status for pod" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" pod="openshift-marketplace/community-operators-4b4kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4b4kw\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.375343 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.375604 4975 status_manager.go:851] "Failed to get status for pod" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" pod="openshift-marketplace/redhat-marketplace-7g6z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-7g6z6\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.427055 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/518ddce4-70cd-4aca-a096-37237d16dd76-utilities\") pod \"518ddce4-70cd-4aca-a096-37237d16dd76\" (UID: \"518ddce4-70cd-4aca-a096-37237d16dd76\") " Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.427140 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/518ddce4-70cd-4aca-a096-37237d16dd76-catalog-content\") pod \"518ddce4-70cd-4aca-a096-37237d16dd76\" (UID: \"518ddce4-70cd-4aca-a096-37237d16dd76\") " Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.427219 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jv9q8\" (UniqueName: \"kubernetes.io/projected/518ddce4-70cd-4aca-a096-37237d16dd76-kube-api-access-jv9q8\") pod \"518ddce4-70cd-4aca-a096-37237d16dd76\" (UID: \"518ddce4-70cd-4aca-a096-37237d16dd76\") " Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.428609 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/518ddce4-70cd-4aca-a096-37237d16dd76-utilities" (OuterVolumeSpecName: "utilities") pod "518ddce4-70cd-4aca-a096-37237d16dd76" (UID: "518ddce4-70cd-4aca-a096-37237d16dd76"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.432542 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/518ddce4-70cd-4aca-a096-37237d16dd76-kube-api-access-jv9q8" (OuterVolumeSpecName: "kube-api-access-jv9q8") pod "518ddce4-70cd-4aca-a096-37237d16dd76" (UID: "518ddce4-70cd-4aca-a096-37237d16dd76"). InnerVolumeSpecName "kube-api-access-jv9q8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.448236 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/518ddce4-70cd-4aca-a096-37237d16dd76-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "518ddce4-70cd-4aca-a096-37237d16dd76" (UID: "518ddce4-70cd-4aca-a096-37237d16dd76"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.528795 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jv9q8\" (UniqueName: \"kubernetes.io/projected/518ddce4-70cd-4aca-a096-37237d16dd76-kube-api-access-jv9q8\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.528824 4975 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/518ddce4-70cd-4aca-a096-37237d16dd76-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.528836 4975 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/518ddce4-70cd-4aca-a096-37237d16dd76-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:00 crc kubenswrapper[4975]: E0126 00:11:00.855695 4975 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:00 crc kubenswrapper[4975]: E0126 00:11:00.856099 4975 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:00 crc kubenswrapper[4975]: E0126 00:11:00.856384 4975 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:00 crc kubenswrapper[4975]: E0126 00:11:00.856666 4975 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:00 crc kubenswrapper[4975]: E0126 00:11:00.857026 4975 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:00 crc kubenswrapper[4975]: I0126 00:11:00.857099 4975 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Jan 26 00:11:00 crc kubenswrapper[4975]: E0126 00:11:00.857392 4975 controller.go:145] "Failed to ensure 
lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.193:6443: connect: connection refused" interval="200ms" Jan 26 00:11:01 crc kubenswrapper[4975]: I0126 00:11:01.027193 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 26 00:11:01 crc kubenswrapper[4975]: I0126 00:11:01.028492 4975 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210" exitCode=0 Jan 26 00:11:01 crc kubenswrapper[4975]: I0126 00:11:01.031939 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7g6z6" event={"ID":"518ddce4-70cd-4aca-a096-37237d16dd76","Type":"ContainerDied","Data":"df062c9b8e60c24edc5e7acacfff5cc175c6f1a9de7bce52ec7f2cc452ac7c66"} Jan 26 00:11:01 crc kubenswrapper[4975]: I0126 00:11:01.032013 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7g6z6" Jan 26 00:11:01 crc kubenswrapper[4975]: I0126 00:11:01.032782 4975 status_manager.go:851] "Failed to get status for pod" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" pod="openshift-marketplace/community-operators-4b4kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4b4kw\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:01 crc kubenswrapper[4975]: I0126 00:11:01.033244 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:01 crc kubenswrapper[4975]: I0126 00:11:01.033625 4975 status_manager.go:851] "Failed to get status for pod" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" pod="openshift-marketplace/redhat-marketplace-7g6z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-7g6z6\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:01 crc kubenswrapper[4975]: I0126 00:11:01.048144 4975 status_manager.go:851] "Failed to get status for pod" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" pod="openshift-marketplace/community-operators-4b4kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4b4kw\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:01 crc kubenswrapper[4975]: I0126 00:11:01.048470 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:01 crc kubenswrapper[4975]: I0126 00:11:01.049136 4975 status_manager.go:851] "Failed to get status for pod" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" pod="openshift-marketplace/redhat-marketplace-7g6z6" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-7g6z6\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:01 crc kubenswrapper[4975]: E0126 00:11:01.058745 4975 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.193:6443: connect: connection refused" interval="400ms" Jan 26 00:11:01 crc kubenswrapper[4975]: E0126 00:11:01.460319 4975 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.193:6443: connect: connection refused" interval="800ms" Jan 26 00:11:02 crc kubenswrapper[4975]: I0126 00:11:02.040296 4975 generic.go:334] "Generic (PLEG): container finished" podID="93092266-4d3b-451b-bf92-c033c3f62937" containerID="99a42aad561e716756033854100915474cdfd124b56037b58dbec999fcde86d6" exitCode=0 Jan 26 00:11:02 crc kubenswrapper[4975]: I0126 00:11:02.040541 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"93092266-4d3b-451b-bf92-c033c3f62937","Type":"ContainerDied","Data":"99a42aad561e716756033854100915474cdfd124b56037b58dbec999fcde86d6"} Jan 26 00:11:02 crc kubenswrapper[4975]: I0126 00:11:02.041564 4975 status_manager.go:851] "Failed to get status for pod" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" pod="openshift-marketplace/community-operators-4b4kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4b4kw\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:02 crc kubenswrapper[4975]: I0126 00:11:02.042077 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:02 crc kubenswrapper[4975]: I0126 00:11:02.042296 4975 status_manager.go:851] "Failed to get status for pod" podUID="93092266-4d3b-451b-bf92-c033c3f62937" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:02 crc kubenswrapper[4975]: I0126 00:11:02.042485 4975 status_manager.go:851] "Failed to get status for pod" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" pod="openshift-marketplace/redhat-marketplace-7g6z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-7g6z6\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:02 crc kubenswrapper[4975]: E0126 00:11:02.261624 4975 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.193:6443: connect: connection refused" interval="1.6s" Jan 26 00:11:02 crc kubenswrapper[4975]: E0126 00:11:02.452114 4975 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 
ae620f14657f124f669ea0e6a99b5740180e132f4742fea45c6637024057edfa is running failed: container process not found" containerID="ae620f14657f124f669ea0e6a99b5740180e132f4742fea45c6637024057edfa" cmd=["grpc_health_probe","-addr=:50051"] Jan 26 00:11:02 crc kubenswrapper[4975]: E0126 00:11:02.452998 4975 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ae620f14657f124f669ea0e6a99b5740180e132f4742fea45c6637024057edfa is running failed: container process not found" containerID="ae620f14657f124f669ea0e6a99b5740180e132f4742fea45c6637024057edfa" cmd=["grpc_health_probe","-addr=:50051"] Jan 26 00:11:02 crc kubenswrapper[4975]: E0126 00:11:02.453359 4975 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ae620f14657f124f669ea0e6a99b5740180e132f4742fea45c6637024057edfa is running failed: container process not found" containerID="ae620f14657f124f669ea0e6a99b5740180e132f4742fea45c6637024057edfa" cmd=["grpc_health_probe","-addr=:50051"] Jan 26 00:11:02 crc kubenswrapper[4975]: E0126 00:11:02.453405 4975 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ae620f14657f124f669ea0e6a99b5740180e132f4742fea45c6637024057edfa is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-nh5hc" podUID="77eccefb-3716-4e9d-9807-059400c1c934" containerName="registry-server" Jan 26 00:11:03 crc kubenswrapper[4975]: I0126 00:11:03.306643 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 26 00:11:03 crc kubenswrapper[4975]: I0126 00:11:03.307129 4975 status_manager.go:851] "Failed to get status for pod" podUID="93092266-4d3b-451b-bf92-c033c3f62937" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:03 crc kubenswrapper[4975]: I0126 00:11:03.307296 4975 status_manager.go:851] "Failed to get status for pod" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" pod="openshift-marketplace/redhat-marketplace-7g6z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-7g6z6\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:03 crc kubenswrapper[4975]: I0126 00:11:03.307440 4975 status_manager.go:851] "Failed to get status for pod" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" pod="openshift-marketplace/community-operators-4b4kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4b4kw\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:03 crc kubenswrapper[4975]: I0126 00:11:03.307594 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:03 crc kubenswrapper[4975]: I0126 00:11:03.472701 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: 
\"kubernetes.io/host-path/93092266-4d3b-451b-bf92-c033c3f62937-kubelet-dir\") pod \"93092266-4d3b-451b-bf92-c033c3f62937\" (UID: \"93092266-4d3b-451b-bf92-c033c3f62937\") " Jan 26 00:11:03 crc kubenswrapper[4975]: I0126 00:11:03.473241 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/93092266-4d3b-451b-bf92-c033c3f62937-kube-api-access\") pod \"93092266-4d3b-451b-bf92-c033c3f62937\" (UID: \"93092266-4d3b-451b-bf92-c033c3f62937\") " Jan 26 00:11:03 crc kubenswrapper[4975]: I0126 00:11:03.473488 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/93092266-4d3b-451b-bf92-c033c3f62937-var-lock\") pod \"93092266-4d3b-451b-bf92-c033c3f62937\" (UID: \"93092266-4d3b-451b-bf92-c033c3f62937\") " Jan 26 00:11:03 crc kubenswrapper[4975]: I0126 00:11:03.472966 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/93092266-4d3b-451b-bf92-c033c3f62937-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "93092266-4d3b-451b-bf92-c033c3f62937" (UID: "93092266-4d3b-451b-bf92-c033c3f62937"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:11:03 crc kubenswrapper[4975]: I0126 00:11:03.473586 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/93092266-4d3b-451b-bf92-c033c3f62937-var-lock" (OuterVolumeSpecName: "var-lock") pod "93092266-4d3b-451b-bf92-c033c3f62937" (UID: "93092266-4d3b-451b-bf92-c033c3f62937"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:11:03 crc kubenswrapper[4975]: I0126 00:11:03.474556 4975 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/93092266-4d3b-451b-bf92-c033c3f62937-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:03 crc kubenswrapper[4975]: I0126 00:11:03.474893 4975 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/93092266-4d3b-451b-bf92-c033c3f62937-var-lock\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:03 crc kubenswrapper[4975]: I0126 00:11:03.483022 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93092266-4d3b-451b-bf92-c033c3f62937-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "93092266-4d3b-451b-bf92-c033c3f62937" (UID: "93092266-4d3b-451b-bf92-c033c3f62937"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:11:03 crc kubenswrapper[4975]: I0126 00:11:03.576055 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/93092266-4d3b-451b-bf92-c033c3f62937-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:03 crc kubenswrapper[4975]: E0126 00:11:03.862120 4975 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.193:6443: connect: connection refused" interval="3.2s" Jan 26 00:11:04 crc kubenswrapper[4975]: I0126 00:11:04.056629 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 26 00:11:04 crc kubenswrapper[4975]: I0126 00:11:04.566328 4975 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e" exitCode=0 Jan 26 00:11:04 crc kubenswrapper[4975]: I0126 00:11:04.568640 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"93092266-4d3b-451b-bf92-c033c3f62937","Type":"ContainerDied","Data":"7a35b177fe070cdeaf5f35e775496dbc63a8ca195e51c53730b5785a30ab3e23"} Jan 26 00:11:04 crc kubenswrapper[4975]: I0126 00:11:04.568683 4975 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a35b177fe070cdeaf5f35e775496dbc63a8ca195e51c53730b5785a30ab3e23" Jan 26 00:11:04 crc kubenswrapper[4975]: I0126 00:11:04.568723 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 26 00:11:04 crc kubenswrapper[4975]: I0126 00:11:04.570162 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"964ffd6cbcb6021ca062f0252be25c4131c03c66eadbbedfb8d22c196fe95e4a"} Jan 26 00:11:04 crc kubenswrapper[4975]: I0126 00:11:04.572919 4975 status_manager.go:851] "Failed to get status for pod" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" pod="openshift-marketplace/community-operators-4b4kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4b4kw\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:04 crc kubenswrapper[4975]: I0126 00:11:04.573599 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:04 crc kubenswrapper[4975]: I0126 00:11:04.574198 4975 status_manager.go:851] "Failed to get status for pod" podUID="93092266-4d3b-451b-bf92-c033c3f62937" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:04 crc kubenswrapper[4975]: I0126 00:11:04.574555 4975 status_manager.go:851] "Failed to get status for pod" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" pod="openshift-marketplace/redhat-marketplace-7g6z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-7g6z6\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.366170 4975 scope.go:117] "RemoveContainer" containerID="ea7dd52c1c5e1bc10491603f99dce7377bae013ffbf214c456d53d624e42637c" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.429422 4975 scope.go:117] "RemoveContainer" containerID="a85dc64d3bdddd4709c9363859916bcd077d0d68f731164ca9055c4102a27357" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.430560 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-nh5hc_77eccefb-3716-4e9d-9807-059400c1c934/registry-server/0.log" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.431634 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nh5hc" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.432410 4975 status_manager.go:851] "Failed to get status for pod" podUID="77eccefb-3716-4e9d-9807-059400c1c934" pod="openshift-marketplace/redhat-operators-nh5hc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nh5hc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.433050 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.433382 4975 status_manager.go:851] "Failed to get status for pod" podUID="93092266-4d3b-451b-bf92-c033c3f62937" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.433677 4975 status_manager.go:851] "Failed to get status for pod" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" pod="openshift-marketplace/redhat-marketplace-7g6z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-7g6z6\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.433948 4975 status_manager.go:851] "Failed to get status for pod" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" pod="openshift-marketplace/community-operators-4b4kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4b4kw\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.442510 4975 scope.go:117] "RemoveContainer" containerID="7c2d6b68475e273f7ecacb80c49a8724d4312653dfd516ccc43c7108c905f870" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.580527 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-nh5hc_77eccefb-3716-4e9d-9807-059400c1c934/registry-server/0.log" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.581472 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-nh5hc" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.581462 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nh5hc" event={"ID":"77eccefb-3716-4e9d-9807-059400c1c934","Type":"ContainerDied","Data":"45b87184c693dfa18f337a0b27aa36465406aa3cfb88602681d77e6a0a902465"} Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.581606 4975 scope.go:117] "RemoveContainer" containerID="ae620f14657f124f669ea0e6a99b5740180e132f4742fea45c6637024057edfa" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.583244 4975 status_manager.go:851] "Failed to get status for pod" podUID="93092266-4d3b-451b-bf92-c033c3f62937" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.583933 4975 status_manager.go:851] "Failed to get status for pod" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" pod="openshift-marketplace/redhat-marketplace-7g6z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-7g6z6\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.584821 4975 status_manager.go:851] "Failed to get status for pod" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" pod="openshift-marketplace/community-operators-4b4kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4b4kw\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.585358 4975 status_manager.go:851] "Failed to get status for pod" podUID="77eccefb-3716-4e9d-9807-059400c1c934" pod="openshift-marketplace/redhat-operators-nh5hc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nh5hc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.585837 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.586477 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.598363 4975 scope.go:117] "RemoveContainer" containerID="865173e4a5399f41ba223e268caafca7636b8ea8c45a613a50b98f10539b01b1" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.602883 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rg988\" (UniqueName: \"kubernetes.io/projected/77eccefb-3716-4e9d-9807-059400c1c934-kube-api-access-rg988\") pod \"77eccefb-3716-4e9d-9807-059400c1c934\" (UID: \"77eccefb-3716-4e9d-9807-059400c1c934\") " Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.602966 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/77eccefb-3716-4e9d-9807-059400c1c934-catalog-content\") pod \"77eccefb-3716-4e9d-9807-059400c1c934\" (UID: \"77eccefb-3716-4e9d-9807-059400c1c934\") " Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.603010 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77eccefb-3716-4e9d-9807-059400c1c934-utilities\") pod \"77eccefb-3716-4e9d-9807-059400c1c934\" (UID: \"77eccefb-3716-4e9d-9807-059400c1c934\") " Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.604925 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77eccefb-3716-4e9d-9807-059400c1c934-utilities" (OuterVolumeSpecName: "utilities") pod "77eccefb-3716-4e9d-9807-059400c1c934" (UID: "77eccefb-3716-4e9d-9807-059400c1c934"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.608380 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77eccefb-3716-4e9d-9807-059400c1c934-kube-api-access-rg988" (OuterVolumeSpecName: "kube-api-access-rg988") pod "77eccefb-3716-4e9d-9807-059400c1c934" (UID: "77eccefb-3716-4e9d-9807-059400c1c934"). InnerVolumeSpecName "kube-api-access-rg988". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.645676 4975 scope.go:117] "RemoveContainer" containerID="7b558a412cb8a98230bc192ccee476485fd31bec7979a674150cbd55c152d9ab" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.704017 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rg988\" (UniqueName: \"kubernetes.io/projected/77eccefb-3716-4e9d-9807-059400c1c934-kube-api-access-rg988\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.704123 4975 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/77eccefb-3716-4e9d-9807-059400c1c934-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.892212 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/77eccefb-3716-4e9d-9807-059400c1c934-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "77eccefb-3716-4e9d-9807-059400c1c934" (UID: "77eccefb-3716-4e9d-9807-059400c1c934"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:11:05 crc kubenswrapper[4975]: I0126 00:11:05.906501 4975 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/77eccefb-3716-4e9d-9807-059400c1c934-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:06 crc kubenswrapper[4975]: I0126 00:11:06.159382 4975 status_manager.go:851] "Failed to get status for pod" podUID="77eccefb-3716-4e9d-9807-059400c1c934" pod="openshift-marketplace/redhat-operators-nh5hc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nh5hc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:06 crc kubenswrapper[4975]: I0126 00:11:06.159995 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:06 crc kubenswrapper[4975]: I0126 00:11:06.160669 4975 status_manager.go:851] "Failed to get status for pod" podUID="93092266-4d3b-451b-bf92-c033c3f62937" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:06 crc kubenswrapper[4975]: I0126 00:11:06.161105 4975 status_manager.go:851] "Failed to get status for pod" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" pod="openshift-marketplace/redhat-marketplace-7g6z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-7g6z6\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:06 crc kubenswrapper[4975]: I0126 00:11:06.161346 4975 status_manager.go:851] "Failed to get status for pod" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" pod="openshift-marketplace/community-operators-4b4kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4b4kw\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:06 crc kubenswrapper[4975]: I0126 00:11:06.185086 4975 status_manager.go:851] "Failed to get status for pod" podUID="77eccefb-3716-4e9d-9807-059400c1c934" pod="openshift-marketplace/redhat-operators-nh5hc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nh5hc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:06 crc kubenswrapper[4975]: I0126 00:11:06.185307 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:06 crc kubenswrapper[4975]: I0126 00:11:06.185486 4975 status_manager.go:851] "Failed to get status for pod" podUID="93092266-4d3b-451b-bf92-c033c3f62937" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:06 crc kubenswrapper[4975]: I0126 00:11:06.185665 4975 
status_manager.go:851] "Failed to get status for pod" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" pod="openshift-marketplace/redhat-marketplace-7g6z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-7g6z6\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:06 crc kubenswrapper[4975]: I0126 00:11:06.185893 4975 status_manager.go:851] "Failed to get status for pod" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" pod="openshift-marketplace/community-operators-4b4kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4b4kw\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:06 crc kubenswrapper[4975]: E0126 00:11:06.197446 4975 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.193:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188e1f6d40922b4b openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-26 00:10:57.698401099 +0000 UTC m=+241.819606593,LastTimestamp:2026-01-26 00:10:57.698401099 +0000 UTC m=+241.819606593,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 26 00:11:06 crc kubenswrapper[4975]: I0126 00:11:06.599026 4975 status_manager.go:851] "Failed to get status for pod" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" pod="openshift-marketplace/community-operators-4b4kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4b4kw\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:06 crc kubenswrapper[4975]: I0126 00:11:06.599489 4975 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:06 crc kubenswrapper[4975]: I0126 00:11:06.599956 4975 status_manager.go:851] "Failed to get status for pod" podUID="77eccefb-3716-4e9d-9807-059400c1c934" pod="openshift-marketplace/redhat-operators-nh5hc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nh5hc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:06 crc kubenswrapper[4975]: I0126 00:11:06.600303 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 
00:11:06 crc kubenswrapper[4975]: I0126 00:11:06.600606 4975 status_manager.go:851] "Failed to get status for pod" podUID="93092266-4d3b-451b-bf92-c033c3f62937" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:06 crc kubenswrapper[4975]: I0126 00:11:06.601029 4975 status_manager.go:851] "Failed to get status for pod" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" pod="openshift-marketplace/redhat-marketplace-7g6z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-7g6z6\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:07 crc kubenswrapper[4975]: E0126 00:11:07.063538 4975 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.193:6443: connect: connection refused" interval="6.4s" Jan 26 00:11:08 crc kubenswrapper[4975]: E0126 00:11:08.653267 4975 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728.scope\": RecentStats: unable to find data in memory cache]" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.332789 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.333856 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.334670 4975 status_manager.go:851] "Failed to get status for pod" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" pod="openshift-marketplace/redhat-marketplace-7g6z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-7g6z6\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.335418 4975 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.336675 4975 status_manager.go:851] "Failed to get status for pod" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" pod="openshift-marketplace/community-operators-4b4kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4b4kw\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.337176 4975 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.337591 4975 status_manager.go:851] "Failed to get status for pod" podUID="77eccefb-3716-4e9d-9807-059400c1c934" pod="openshift-marketplace/redhat-operators-nh5hc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nh5hc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.337976 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.338437 4975 status_manager.go:851] "Failed to get status for pod" podUID="93092266-4d3b-451b-bf92-c033c3f62937" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.455842 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.456015 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.456482 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.456663 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.456748 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.457305 4975 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.457486 4975 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.458275 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.558130 4975 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.619136 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.620103 4975 scope.go:117] "RemoveContainer" containerID="e0ce6dd30e16e6b044cdb6a78249de36256e725a6f10ffdacccb8155922defe5" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.620520 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.635241 4975 status_manager.go:851] "Failed to get status for pod" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" pod="openshift-marketplace/community-operators-4b4kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4b4kw\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.635637 4975 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.636145 4975 status_manager.go:851] "Failed to get status for pod" podUID="77eccefb-3716-4e9d-9807-059400c1c934" pod="openshift-marketplace/redhat-operators-nh5hc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nh5hc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.636485 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.636770 4975 status_manager.go:851] "Failed to get status for pod" podUID="93092266-4d3b-451b-bf92-c033c3f62937" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.637026 4975 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.637371 4975 status_manager.go:851] "Failed to get status for pod" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" pod="openshift-marketplace/redhat-marketplace-7g6z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-7g6z6\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.638180 4975 scope.go:117] "RemoveContainer" containerID="bb32af7e4e16f6a5dd16eab866a2c9dab1e9b1121e986349eabff1d8ae32a4d0" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.651440 4975 scope.go:117] "RemoveContainer" containerID="5c9e28478414be4dab6e94e64ac8786cd1b990aee31dd59cc4a6496b33c15210" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.664480 4975 scope.go:117] "RemoveContainer" containerID="c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.678524 4975 scope.go:117] "RemoveContainer" 
containerID="6904d2526b85719b101096bcd55ce36651bd04e6c8859a85e4fca88764b5938e" Jan 26 00:11:09 crc kubenswrapper[4975]: I0126 00:11:09.693858 4975 scope.go:117] "RemoveContainer" containerID="e3920ce393754ccef6ef4b360380a4c919739e538b7dab8b176b595e993c1245" Jan 26 00:11:10 crc kubenswrapper[4975]: I0126 00:11:10.154256 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Jan 26 00:11:12 crc kubenswrapper[4975]: I0126 00:11:12.642725 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 26 00:11:12 crc kubenswrapper[4975]: I0126 00:11:12.643043 4975 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9" exitCode=1 Jan 26 00:11:12 crc kubenswrapper[4975]: I0126 00:11:12.643077 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9"} Jan 26 00:11:12 crc kubenswrapper[4975]: I0126 00:11:12.643597 4975 scope.go:117] "RemoveContainer" containerID="e588f2c6a1ba5604c037777eabd5109293027f9ceb0ba2422b0c861cbe1ac0f9" Jan 26 00:11:12 crc kubenswrapper[4975]: I0126 00:11:12.644535 4975 status_manager.go:851] "Failed to get status for pod" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" pod="openshift-marketplace/redhat-marketplace-7g6z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-7g6z6\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:12 crc kubenswrapper[4975]: I0126 00:11:12.644836 4975 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:12 crc kubenswrapper[4975]: I0126 00:11:12.645046 4975 status_manager.go:851] "Failed to get status for pod" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" pod="openshift-marketplace/community-operators-4b4kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4b4kw\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:12 crc kubenswrapper[4975]: I0126 00:11:12.645219 4975 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:12 crc kubenswrapper[4975]: I0126 00:11:12.645384 4975 status_manager.go:851] "Failed to get status for pod" podUID="77eccefb-3716-4e9d-9807-059400c1c934" pod="openshift-marketplace/redhat-operators-nh5hc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nh5hc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 
00:11:12 crc kubenswrapper[4975]: I0126 00:11:12.645634 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:12 crc kubenswrapper[4975]: I0126 00:11:12.645831 4975 status_manager.go:851] "Failed to get status for pod" podUID="93092266-4d3b-451b-bf92-c033c3f62937" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:13 crc kubenswrapper[4975]: E0126 00:11:13.465233 4975 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.193:6443: connect: connection refused" interval="7s" Jan 26 00:11:13 crc kubenswrapper[4975]: I0126 00:11:13.662354 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 26 00:11:13 crc kubenswrapper[4975]: I0126 00:11:13.662448 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7b45311d8ac35311c73f7c2bdc0ff12f2ef5c34f3be51d5acdd407109b20fce1"} Jan 26 00:11:13 crc kubenswrapper[4975]: I0126 00:11:13.663971 4975 status_manager.go:851] "Failed to get status for pod" podUID="77eccefb-3716-4e9d-9807-059400c1c934" pod="openshift-marketplace/redhat-operators-nh5hc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nh5hc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:13 crc kubenswrapper[4975]: I0126 00:11:13.664460 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:13 crc kubenswrapper[4975]: I0126 00:11:13.664955 4975 status_manager.go:851] "Failed to get status for pod" podUID="93092266-4d3b-451b-bf92-c033c3f62937" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:13 crc kubenswrapper[4975]: I0126 00:11:13.665393 4975 status_manager.go:851] "Failed to get status for pod" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" pod="openshift-marketplace/redhat-marketplace-7g6z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-7g6z6\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:13 crc kubenswrapper[4975]: I0126 00:11:13.665971 4975 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:13 crc kubenswrapper[4975]: I0126 00:11:13.666389 4975 status_manager.go:851] "Failed to get status for pod" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" pod="openshift-marketplace/community-operators-4b4kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4b4kw\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:13 crc kubenswrapper[4975]: I0126 00:11:13.666820 4975 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:14 crc kubenswrapper[4975]: I0126 00:11:14.517327 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 00:11:14 crc kubenswrapper[4975]: I0126 00:11:14.517607 4975 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Jan 26 00:11:14 crc kubenswrapper[4975]: I0126 00:11:14.517701 4975 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Jan 26 00:11:15 crc kubenswrapper[4975]: E0126 00:11:15.233899 4975 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.193:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" volumeName="registry-storage" Jan 26 00:11:15 crc kubenswrapper[4975]: I0126 00:11:15.846034 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" podUID="73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" containerName="oauth-openshift" containerID="cri-o://97672d42752f3182147d0c1b3a8ae214e10381b0a24ae1c57606221e18b05f39" gracePeriod=15 Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.151868 4975 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.152531 4975 status_manager.go:851] "Failed to get status for pod" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" pod="openshift-marketplace/community-operators-4b4kw" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4b4kw\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.153014 4975 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.153567 4975 status_manager.go:851] "Failed to get status for pod" podUID="77eccefb-3716-4e9d-9807-059400c1c934" pod="openshift-marketplace/redhat-operators-nh5hc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nh5hc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.154075 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.154466 4975 status_manager.go:851] "Failed to get status for pod" podUID="93092266-4d3b-451b-bf92-c033c3f62937" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.156724 4975 status_manager.go:851] "Failed to get status for pod" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" pod="openshift-marketplace/redhat-marketplace-7g6z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-7g6z6\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: E0126 00:11:16.199232 4975 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.193:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188e1f6d40922b4b openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-26 00:10:57.698401099 +0000 UTC m=+241.819606593,LastTimestamp:2026-01-26 00:10:57.698401099 +0000 UTC m=+241.819606593,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.255663 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.256295 4975 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.256760 4975 status_manager.go:851] "Failed to get status for pod" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" pod="openshift-marketplace/community-operators-4b4kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4b4kw\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.256992 4975 status_manager.go:851] "Failed to get status for pod" podUID="73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-xrnhz\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.257191 4975 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.257406 4975 status_manager.go:851] "Failed to get status for pod" podUID="77eccefb-3716-4e9d-9807-059400c1c934" pod="openshift-marketplace/redhat-operators-nh5hc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nh5hc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.257580 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.257833 4975 status_manager.go:851] "Failed to get status for pod" podUID="93092266-4d3b-451b-bf92-c033c3f62937" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.258153 4975 status_manager.go:851] "Failed to get status for pod" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" pod="openshift-marketplace/redhat-marketplace-7g6z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-7g6z6\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.349083 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-trusted-ca-bundle\") pod \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.349119 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-user-template-login\") pod \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.349138 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-router-certs\") pod \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.349159 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-audit-dir\") pod \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.349180 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-service-ca\") pod \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.349204 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-ocp-branding-template\") pod \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.349223 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-session\") pod \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.349241 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-user-template-error\") pod \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.349271 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-67hdw\" (UniqueName: \"kubernetes.io/projected/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-kube-api-access-67hdw\") pod \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.349288 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-audit-policies\") pod \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\" (UID: 
\"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.349306 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-user-template-provider-selection\") pod \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.349333 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-serving-cert\") pod \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.349351 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-user-idp-0-file-data\") pod \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.349372 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-cliconfig\") pod \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\" (UID: \"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9\") " Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.350201 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" (UID: "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.350226 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" (UID: "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.350298 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" (UID: "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.350952 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" (UID: "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.351337 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" (UID: "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.356100 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" (UID: "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.357162 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-kube-api-access-67hdw" (OuterVolumeSpecName: "kube-api-access-67hdw") pod "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" (UID: "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9"). InnerVolumeSpecName "kube-api-access-67hdw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.357698 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" (UID: "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.358366 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" (UID: "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.358950 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" (UID: "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.359467 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" (UID: "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.359794 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" (UID: "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.363351 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" (UID: "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.364049 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" (UID: "73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.450103 4975 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.450130 4975 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.450140 4975 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.450150 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-67hdw\" (UniqueName: \"kubernetes.io/projected/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-kube-api-access-67hdw\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.450161 4975 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.450170 4975 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.450179 4975 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.450189 4975 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.450198 4975 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.450207 4975 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.450215 4975 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.450225 4975 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.450235 4975 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.450244 4975 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.684916 4975 generic.go:334] "Generic (PLEG): container finished" podID="73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" containerID="97672d42752f3182147d0c1b3a8ae214e10381b0a24ae1c57606221e18b05f39" exitCode=0 Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.684978 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" event={"ID":"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9","Type":"ContainerDied","Data":"97672d42752f3182147d0c1b3a8ae214e10381b0a24ae1c57606221e18b05f39"} Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.685015 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" event={"ID":"73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9","Type":"ContainerDied","Data":"5e930fad3bf4b0782e6d15c46bec0dfa8366bafb54ffc7d22be418c815d3759a"} Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.685043 4975 scope.go:117] "RemoveContainer" containerID="97672d42752f3182147d0c1b3a8ae214e10381b0a24ae1c57606221e18b05f39" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.685215 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.686476 4975 status_manager.go:851] "Failed to get status for pod" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" pod="openshift-marketplace/community-operators-4b4kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4b4kw\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.687047 4975 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.687244 4975 status_manager.go:851] "Failed to get status for pod" podUID="73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-xrnhz\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.687433 4975 status_manager.go:851] "Failed to get status for pod" podUID="77eccefb-3716-4e9d-9807-059400c1c934" pod="openshift-marketplace/redhat-operators-nh5hc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nh5hc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.687654 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.687928 4975 status_manager.go:851] "Failed to get status for pod" podUID="93092266-4d3b-451b-bf92-c033c3f62937" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.688268 4975 status_manager.go:851] "Failed to get status for pod" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" pod="openshift-marketplace/redhat-marketplace-7g6z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-7g6z6\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.688525 4975 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.710633 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.711227 4975 status_manager.go:851] "Failed to get status for pod" podUID="93092266-4d3b-451b-bf92-c033c3f62937" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.711855 4975 status_manager.go:851] "Failed to get status for pod" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" pod="openshift-marketplace/redhat-marketplace-7g6z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-7g6z6\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.712212 4975 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.712617 4975 status_manager.go:851] "Failed to get status for pod" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" pod="openshift-marketplace/community-operators-4b4kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4b4kw\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.713046 4975 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.713341 4975 status_manager.go:851] "Failed to get status for pod" podUID="73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-xrnhz\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.713713 4975 status_manager.go:851] "Failed to get status for pod" podUID="77eccefb-3716-4e9d-9807-059400c1c934" pod="openshift-marketplace/redhat-operators-nh5hc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nh5hc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.720281 4975 scope.go:117] "RemoveContainer" containerID="97672d42752f3182147d0c1b3a8ae214e10381b0a24ae1c57606221e18b05f39" Jan 26 00:11:16 crc kubenswrapper[4975]: E0126 00:11:16.721138 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97672d42752f3182147d0c1b3a8ae214e10381b0a24ae1c57606221e18b05f39\": container with ID starting with 97672d42752f3182147d0c1b3a8ae214e10381b0a24ae1c57606221e18b05f39 not found: ID does not 
exist" containerID="97672d42752f3182147d0c1b3a8ae214e10381b0a24ae1c57606221e18b05f39" Jan 26 00:11:16 crc kubenswrapper[4975]: I0126 00:11:16.721198 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97672d42752f3182147d0c1b3a8ae214e10381b0a24ae1c57606221e18b05f39"} err="failed to get container status \"97672d42752f3182147d0c1b3a8ae214e10381b0a24ae1c57606221e18b05f39\": rpc error: code = NotFound desc = could not find container \"97672d42752f3182147d0c1b3a8ae214e10381b0a24ae1c57606221e18b05f39\": container with ID starting with 97672d42752f3182147d0c1b3a8ae214e10381b0a24ae1c57606221e18b05f39 not found: ID does not exist" Jan 26 00:11:18 crc kubenswrapper[4975]: E0126 00:11:18.786895 4975 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728.scope\": RecentStats: unable to find data in memory cache]" Jan 26 00:11:20 crc kubenswrapper[4975]: I0126 00:11:20.277177 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 00:11:20 crc kubenswrapper[4975]: E0126 00:11:20.466646 4975 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.193:6443: connect: connection refused" interval="7s" Jan 26 00:11:23 crc kubenswrapper[4975]: I0126 00:11:23.146343 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:11:23 crc kubenswrapper[4975]: I0126 00:11:23.147781 4975 status_manager.go:851] "Failed to get status for pod" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" pod="openshift-marketplace/community-operators-4b4kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4b4kw\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:23 crc kubenswrapper[4975]: I0126 00:11:23.148342 4975 status_manager.go:851] "Failed to get status for pod" podUID="73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-xrnhz\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:23 crc kubenswrapper[4975]: I0126 00:11:23.148867 4975 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:23 crc kubenswrapper[4975]: I0126 00:11:23.149395 4975 status_manager.go:851] "Failed to get status for pod" podUID="77eccefb-3716-4e9d-9807-059400c1c934" pod="openshift-marketplace/redhat-operators-nh5hc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nh5hc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:23 crc kubenswrapper[4975]: I0126 00:11:23.149964 4975 status_manager.go:851] "Failed to get status for pod" 
podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:23 crc kubenswrapper[4975]: I0126 00:11:23.150457 4975 status_manager.go:851] "Failed to get status for pod" podUID="93092266-4d3b-451b-bf92-c033c3f62937" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:23 crc kubenswrapper[4975]: I0126 00:11:23.150758 4975 status_manager.go:851] "Failed to get status for pod" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" pod="openshift-marketplace/redhat-marketplace-7g6z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-7g6z6\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:23 crc kubenswrapper[4975]: I0126 00:11:23.150993 4975 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:23 crc kubenswrapper[4975]: I0126 00:11:23.160514 4975 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d260a77f-a97d-4771-92cd-ebd476f99134" Jan 26 00:11:23 crc kubenswrapper[4975]: I0126 00:11:23.160539 4975 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d260a77f-a97d-4771-92cd-ebd476f99134" Jan 26 00:11:23 crc kubenswrapper[4975]: E0126 00:11:23.160968 4975 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:11:23 crc kubenswrapper[4975]: I0126 00:11:23.161514 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:11:23 crc kubenswrapper[4975]: I0126 00:11:23.730227 4975 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="afad10f76a28781d81a260a0900cc47b2f83979195283e4e0031f46dc998cdf2" exitCode=0 Jan 26 00:11:23 crc kubenswrapper[4975]: I0126 00:11:23.730297 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"afad10f76a28781d81a260a0900cc47b2f83979195283e4e0031f46dc998cdf2"} Jan 26 00:11:23 crc kubenswrapper[4975]: I0126 00:11:23.730373 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"a191c0b4fa2554e65143795e5af85834811076525cff6e25ab8bda7fc6ae85af"} Jan 26 00:11:23 crc kubenswrapper[4975]: I0126 00:11:23.730992 4975 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d260a77f-a97d-4771-92cd-ebd476f99134" Jan 26 00:11:23 crc kubenswrapper[4975]: I0126 00:11:23.731034 4975 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d260a77f-a97d-4771-92cd-ebd476f99134" Jan 26 00:11:23 crc kubenswrapper[4975]: I0126 00:11:23.731528 4975 status_manager.go:851] "Failed to get status for pod" podUID="93092266-4d3b-451b-bf92-c033c3f62937" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:23 crc kubenswrapper[4975]: E0126 00:11:23.731655 4975 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:11:23 crc kubenswrapper[4975]: I0126 00:11:23.732044 4975 status_manager.go:851] "Failed to get status for pod" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" pod="openshift-marketplace/redhat-marketplace-7g6z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-7g6z6\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:23 crc kubenswrapper[4975]: I0126 00:11:23.732428 4975 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:23 crc kubenswrapper[4975]: I0126 00:11:23.732817 4975 status_manager.go:851] "Failed to get status for pod" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" pod="openshift-marketplace/community-operators-4b4kw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-4b4kw\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:23 crc kubenswrapper[4975]: I0126 00:11:23.733238 4975 status_manager.go:851] "Failed to get status for pod" podUID="73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" pod="openshift-authentication/oauth-openshift-558db77b4-xrnhz" 
err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-xrnhz\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:23 crc kubenswrapper[4975]: I0126 00:11:23.733676 4975 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:23 crc kubenswrapper[4975]: I0126 00:11:23.734195 4975 status_manager.go:851] "Failed to get status for pod" podUID="77eccefb-3716-4e9d-9807-059400c1c934" pod="openshift-marketplace/redhat-operators-nh5hc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-nh5hc\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:23 crc kubenswrapper[4975]: I0126 00:11:23.734712 4975 status_manager.go:851] "Failed to get status for pod" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" pod="openshift-marketplace/certified-operators-mmf4j" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-mmf4j\": dial tcp 38.102.83.193:6443: connect: connection refused" Jan 26 00:11:24 crc kubenswrapper[4975]: I0126 00:11:24.523694 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 00:11:24 crc kubenswrapper[4975]: I0126 00:11:24.527826 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 26 00:11:24 crc kubenswrapper[4975]: I0126 00:11:24.742393 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"3ccc8c5a284a8b41af0538c2d66a42208904c5fe51a453aa693c0fb3c6bcce17"} Jan 26 00:11:24 crc kubenswrapper[4975]: I0126 00:11:24.742433 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"1d809ae7b863b724db902358be5960470629c7c36da613c56db33347fbcf03cf"} Jan 26 00:11:24 crc kubenswrapper[4975]: I0126 00:11:24.742445 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"656e579c2968623ecc274b5653f57116d3a10319a94ce9aaa3d11c5854608e9e"} Jan 26 00:11:25 crc kubenswrapper[4975]: I0126 00:11:25.750657 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b71cf483322d36c7ccf0c90b1c3db2129e211923f10170811f4e6b49cb002965"} Jan 26 00:11:25 crc kubenswrapper[4975]: I0126 00:11:25.750967 4975 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d260a77f-a97d-4771-92cd-ebd476f99134" Jan 26 00:11:25 crc kubenswrapper[4975]: I0126 00:11:25.750992 4975 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d260a77f-a97d-4771-92cd-ebd476f99134" Jan 26 00:11:25 crc kubenswrapper[4975]: I0126 00:11:25.750979 4975 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"6e246a8054fcf6485906a5163821ca5487d2a78c845857b821bdb44df7b5f780"} Jan 26 00:11:25 crc kubenswrapper[4975]: I0126 00:11:25.751013 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:11:28 crc kubenswrapper[4975]: I0126 00:11:28.161971 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:11:28 crc kubenswrapper[4975]: I0126 00:11:28.162478 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:11:28 crc kubenswrapper[4975]: I0126 00:11:28.167027 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:11:28 crc kubenswrapper[4975]: E0126 00:11:28.916512 4975 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728.scope\": RecentStats: unable to find data in memory cache]" Jan 26 00:11:30 crc kubenswrapper[4975]: I0126 00:11:30.759144 4975 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:11:30 crc kubenswrapper[4975]: I0126 00:11:30.780587 4975 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d260a77f-a97d-4771-92cd-ebd476f99134" Jan 26 00:11:30 crc kubenswrapper[4975]: I0126 00:11:30.780649 4975 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d260a77f-a97d-4771-92cd-ebd476f99134" Jan 26 00:11:30 crc kubenswrapper[4975]: I0126 00:11:30.784785 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:11:30 crc kubenswrapper[4975]: I0126 00:11:30.798958 4975 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="afce83d4-0b05-49d5-8fe1-817df345c419" Jan 26 00:11:31 crc kubenswrapper[4975]: I0126 00:11:31.784820 4975 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d260a77f-a97d-4771-92cd-ebd476f99134" Jan 26 00:11:31 crc kubenswrapper[4975]: I0126 00:11:31.784860 4975 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d260a77f-a97d-4771-92cd-ebd476f99134" Jan 26 00:11:31 crc kubenswrapper[4975]: I0126 00:11:31.788059 4975 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="afce83d4-0b05-49d5-8fe1-817df345c419" Jan 26 00:11:39 crc kubenswrapper[4975]: E0126 00:11:39.038647 4975 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728.scope\": RecentStats: unable 
to find data in memory cache]" Jan 26 00:11:49 crc kubenswrapper[4975]: E0126 00:11:49.159834 4975 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-c1a626af3a8204181f0aff6f6ac7a1243e14740d32a1fee77d9508710e9af728.scope\": RecentStats: unable to find data in memory cache]" Jan 26 00:11:56 crc kubenswrapper[4975]: I0126 00:11:56.031298 4975 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Jan 26 00:11:56 crc kubenswrapper[4975]: I0126 00:11:56.118006 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 26 00:11:57 crc kubenswrapper[4975]: I0126 00:11:57.976872 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 26 00:11:58 crc kubenswrapper[4975]: I0126 00:11:58.011827 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 26 00:11:58 crc kubenswrapper[4975]: I0126 00:11:58.397015 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 26 00:11:58 crc kubenswrapper[4975]: I0126 00:11:58.945916 4975 generic.go:334] "Generic (PLEG): container finished" podID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" containerID="acfcb02a48dc4fad3f9254b4b0163b3a9005fded7aa35e8771ad167ae351b945" exitCode=0 Jan 26 00:11:58 crc kubenswrapper[4975]: I0126 00:11:58.945964 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" event={"ID":"fcf19955-9a00-4a50-8ce1-bd7098c45eec","Type":"ContainerDied","Data":"acfcb02a48dc4fad3f9254b4b0163b3a9005fded7aa35e8771ad167ae351b945"} Jan 26 00:11:58 crc kubenswrapper[4975]: I0126 00:11:58.946503 4975 scope.go:117] "RemoveContainer" containerID="acfcb02a48dc4fad3f9254b4b0163b3a9005fded7aa35e8771ad167ae351b945" Jan 26 00:11:59 crc kubenswrapper[4975]: I0126 00:11:59.261330 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 26 00:11:59 crc kubenswrapper[4975]: I0126 00:11:59.769165 4975 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 26 00:11:59 crc kubenswrapper[4975]: I0126 00:11:59.783488 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 26 00:11:59 crc kubenswrapper[4975]: I0126 00:11:59.952693 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-8xrbd_fcf19955-9a00-4a50-8ce1-bd7098c45eec/marketplace-operator/1.log" Jan 26 00:11:59 crc kubenswrapper[4975]: I0126 00:11:59.953233 4975 generic.go:334] "Generic (PLEG): container finished" podID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" containerID="543d4ced6375b010f6b9f1c3c6be0dc663ef7c138544ba705c09c021e3a13e53" exitCode=1 Jan 26 00:11:59 crc kubenswrapper[4975]: I0126 00:11:59.953281 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" event={"ID":"fcf19955-9a00-4a50-8ce1-bd7098c45eec","Type":"ContainerDied","Data":"543d4ced6375b010f6b9f1c3c6be0dc663ef7c138544ba705c09c021e3a13e53"} Jan 26 00:11:59 crc kubenswrapper[4975]: I0126 
00:11:59.953320 4975 scope.go:117] "RemoveContainer" containerID="acfcb02a48dc4fad3f9254b4b0163b3a9005fded7aa35e8771ad167ae351b945" Jan 26 00:11:59 crc kubenswrapper[4975]: I0126 00:11:59.954166 4975 scope.go:117] "RemoveContainer" containerID="543d4ced6375b010f6b9f1c3c6be0dc663ef7c138544ba705c09c021e3a13e53" Jan 26 00:11:59 crc kubenswrapper[4975]: E0126 00:11:59.954618 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-8xrbd_openshift-marketplace(fcf19955-9a00-4a50-8ce1-bd7098c45eec)\"" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" podUID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" Jan 26 00:12:00 crc kubenswrapper[4975]: I0126 00:12:00.461522 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 26 00:12:00 crc kubenswrapper[4975]: I0126 00:12:00.619333 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 26 00:12:00 crc kubenswrapper[4975]: I0126 00:12:00.653607 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 26 00:12:00 crc kubenswrapper[4975]: I0126 00:12:00.965162 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-8xrbd_fcf19955-9a00-4a50-8ce1-bd7098c45eec/marketplace-operator/1.log" Jan 26 00:12:01 crc kubenswrapper[4975]: I0126 00:12:01.130369 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 26 00:12:01 crc kubenswrapper[4975]: I0126 00:12:01.368882 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" Jan 26 00:12:01 crc kubenswrapper[4975]: I0126 00:12:01.368968 4975 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" Jan 26 00:12:01 crc kubenswrapper[4975]: I0126 00:12:01.369674 4975 scope.go:117] "RemoveContainer" containerID="543d4ced6375b010f6b9f1c3c6be0dc663ef7c138544ba705c09c021e3a13e53" Jan 26 00:12:01 crc kubenswrapper[4975]: E0126 00:12:01.369975 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-8xrbd_openshift-marketplace(fcf19955-9a00-4a50-8ce1-bd7098c45eec)\"" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" podUID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" Jan 26 00:12:01 crc kubenswrapper[4975]: I0126 00:12:01.454837 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 26 00:12:02 crc kubenswrapper[4975]: I0126 00:12:02.223524 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 26 00:12:02 crc kubenswrapper[4975]: I0126 00:12:02.513728 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 26 00:12:02 crc kubenswrapper[4975]: I0126 00:12:02.556394 4975 reflector.go:368] Caches populated for *v1.CSIDriver from 
k8s.io/client-go/informers/factory.go:160 Jan 26 00:12:02 crc kubenswrapper[4975]: I0126 00:12:02.692879 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 26 00:12:02 crc kubenswrapper[4975]: I0126 00:12:02.701950 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 26 00:12:02 crc kubenswrapper[4975]: I0126 00:12:02.893066 4975 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 26 00:12:03 crc kubenswrapper[4975]: I0126 00:12:03.097850 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 26 00:12:03 crc kubenswrapper[4975]: I0126 00:12:03.319499 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 26 00:12:03 crc kubenswrapper[4975]: I0126 00:12:03.642184 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 26 00:12:03 crc kubenswrapper[4975]: I0126 00:12:03.868563 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 26 00:12:04 crc kubenswrapper[4975]: I0126 00:12:04.041921 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 26 00:12:04 crc kubenswrapper[4975]: I0126 00:12:04.083001 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 26 00:12:04 crc kubenswrapper[4975]: I0126 00:12:04.214339 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 26 00:12:04 crc kubenswrapper[4975]: I0126 00:12:04.253081 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 26 00:12:04 crc kubenswrapper[4975]: I0126 00:12:04.263850 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 26 00:12:04 crc kubenswrapper[4975]: I0126 00:12:04.336590 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 26 00:12:04 crc kubenswrapper[4975]: I0126 00:12:04.379622 4975 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 26 00:12:04 crc kubenswrapper[4975]: I0126 00:12:04.437221 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 26 00:12:04 crc kubenswrapper[4975]: I0126 00:12:04.577812 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 26 00:12:04 crc kubenswrapper[4975]: I0126 00:12:04.650377 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 26 00:12:04 crc kubenswrapper[4975]: I0126 00:12:04.972943 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 26 00:12:05 crc kubenswrapper[4975]: I0126 00:12:05.259014 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 26 
00:12:05 crc kubenswrapper[4975]: I0126 00:12:05.338969 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 26 00:12:05 crc kubenswrapper[4975]: I0126 00:12:05.567564 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 26 00:12:05 crc kubenswrapper[4975]: I0126 00:12:05.682663 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 26 00:12:05 crc kubenswrapper[4975]: I0126 00:12:05.687957 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 26 00:12:05 crc kubenswrapper[4975]: I0126 00:12:05.698637 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 26 00:12:05 crc kubenswrapper[4975]: I0126 00:12:05.734164 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 26 00:12:05 crc kubenswrapper[4975]: I0126 00:12:05.874535 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 26 00:12:05 crc kubenswrapper[4975]: I0126 00:12:05.896185 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 26 00:12:05 crc kubenswrapper[4975]: I0126 00:12:05.956944 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 26 00:12:06 crc kubenswrapper[4975]: I0126 00:12:06.269948 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 26 00:12:06 crc kubenswrapper[4975]: I0126 00:12:06.445178 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 26 00:12:06 crc kubenswrapper[4975]: I0126 00:12:06.451951 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 26 00:12:06 crc kubenswrapper[4975]: I0126 00:12:06.609887 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 26 00:12:06 crc kubenswrapper[4975]: I0126 00:12:06.787943 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 26 00:12:06 crc kubenswrapper[4975]: I0126 00:12:06.813444 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 26 00:12:06 crc kubenswrapper[4975]: I0126 00:12:06.864911 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 26 00:12:07 crc kubenswrapper[4975]: I0126 00:12:07.031498 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 26 00:12:07 crc kubenswrapper[4975]: I0126 00:12:07.105962 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 26 00:12:07 crc kubenswrapper[4975]: I0126 00:12:07.135306 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 26 00:12:07 crc kubenswrapper[4975]: I0126 00:12:07.406823 
4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 26 00:12:07 crc kubenswrapper[4975]: I0126 00:12:07.422393 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 26 00:12:07 crc kubenswrapper[4975]: I0126 00:12:07.466992 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 26 00:12:07 crc kubenswrapper[4975]: I0126 00:12:07.505793 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 26 00:12:07 crc kubenswrapper[4975]: I0126 00:12:07.519511 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 26 00:12:07 crc kubenswrapper[4975]: I0126 00:12:07.545981 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 26 00:12:07 crc kubenswrapper[4975]: I0126 00:12:07.632187 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 26 00:12:07 crc kubenswrapper[4975]: I0126 00:12:07.728859 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 26 00:12:07 crc kubenswrapper[4975]: I0126 00:12:07.993824 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 26 00:12:08 crc kubenswrapper[4975]: I0126 00:12:08.020339 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 26 00:12:08 crc kubenswrapper[4975]: I0126 00:12:08.065890 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 26 00:12:08 crc kubenswrapper[4975]: I0126 00:12:08.135439 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 26 00:12:08 crc kubenswrapper[4975]: I0126 00:12:08.151954 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 26 00:12:08 crc kubenswrapper[4975]: I0126 00:12:08.190305 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 26 00:12:08 crc kubenswrapper[4975]: I0126 00:12:08.488265 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 26 00:12:08 crc kubenswrapper[4975]: I0126 00:12:08.610925 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 26 00:12:08 crc kubenswrapper[4975]: I0126 00:12:08.759723 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 26 00:12:08 crc kubenswrapper[4975]: I0126 00:12:08.868755 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 26 00:12:08 crc kubenswrapper[4975]: I0126 00:12:08.952916 4975 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 26 00:12:08 crc kubenswrapper[4975]: I0126 00:12:08.956695 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=72.956666064 podStartE2EDuration="1m12.956666064s" podCreationTimestamp="2026-01-26 00:10:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:11:30.6789186 +0000 UTC m=+274.800124094" watchObservedRunningTime="2026-01-26 00:12:08.956666064 +0000 UTC m=+313.077871568" Jan 26 00:12:08 crc kubenswrapper[4975]: I0126 00:12:08.960281 4975 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-xrnhz","openshift-marketplace/redhat-operators-nh5hc","openshift-marketplace/redhat-marketplace-7g6z6","openshift-marketplace/community-operators-4b4kw","openshift-kube-apiserver/kube-apiserver-crc","openshift-marketplace/certified-operators-mmf4j"] Jan 26 00:12:08 crc kubenswrapper[4975]: I0126 00:12:08.960398 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 26 00:12:08 crc kubenswrapper[4975]: I0126 00:12:08.965268 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 26 00:12:08 crc kubenswrapper[4975]: I0126 00:12:08.990369 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=38.990338153 podStartE2EDuration="38.990338153s" podCreationTimestamp="2026-01-26 00:11:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:12:08.98055804 +0000 UTC m=+313.101763534" watchObservedRunningTime="2026-01-26 00:12:08.990338153 +0000 UTC m=+313.111543647" Jan 26 00:12:09 crc kubenswrapper[4975]: I0126 00:12:09.214092 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 26 00:12:09 crc kubenswrapper[4975]: I0126 00:12:09.459423 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 26 00:12:09 crc kubenswrapper[4975]: I0126 00:12:09.506199 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 26 00:12:09 crc kubenswrapper[4975]: I0126 00:12:09.572462 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 26 00:12:09 crc kubenswrapper[4975]: I0126 00:12:09.677464 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 26 00:12:09 crc kubenswrapper[4975]: I0126 00:12:09.720846 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 26 00:12:09 crc kubenswrapper[4975]: I0126 00:12:09.895543 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 26 00:12:09 crc kubenswrapper[4975]: I0126 00:12:09.929004 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 26 00:12:10 crc kubenswrapper[4975]: I0126 00:12:10.019920 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 26 00:12:10 crc kubenswrapper[4975]: I0126 00:12:10.153967 4975 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" path="/var/lib/kubelet/pods/518ddce4-70cd-4aca-a096-37237d16dd76/volumes" Jan 26 00:12:10 crc kubenswrapper[4975]: I0126 00:12:10.154988 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" path="/var/lib/kubelet/pods/73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9/volumes" Jan 26 00:12:10 crc kubenswrapper[4975]: I0126 00:12:10.155539 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77eccefb-3716-4e9d-9807-059400c1c934" path="/var/lib/kubelet/pods/77eccefb-3716-4e9d-9807-059400c1c934/volumes" Jan 26 00:12:10 crc kubenswrapper[4975]: I0126 00:12:10.156752 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" path="/var/lib/kubelet/pods/af99b29a-8baa-405f-a1f1-84116ba167a8/volumes" Jan 26 00:12:10 crc kubenswrapper[4975]: I0126 00:12:10.157394 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" path="/var/lib/kubelet/pods/d2aae40a-ded6-40de-a541-f22ef90f71e5/volumes" Jan 26 00:12:10 crc kubenswrapper[4975]: I0126 00:12:10.256196 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 26 00:12:10 crc kubenswrapper[4975]: I0126 00:12:10.284714 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 26 00:12:10 crc kubenswrapper[4975]: I0126 00:12:10.488612 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 26 00:12:10 crc kubenswrapper[4975]: I0126 00:12:10.530401 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 26 00:12:10 crc kubenswrapper[4975]: I0126 00:12:10.869600 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 26 00:12:11 crc kubenswrapper[4975]: I0126 00:12:11.147113 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 26 00:12:11 crc kubenswrapper[4975]: I0126 00:12:11.152714 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 26 00:12:11 crc kubenswrapper[4975]: I0126 00:12:11.213709 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 26 00:12:11 crc kubenswrapper[4975]: I0126 00:12:11.622519 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 26 00:12:11 crc kubenswrapper[4975]: I0126 00:12:11.966799 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.138869 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.175554 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-76766fc778-pjjrb"] Jan 26 00:12:12 crc kubenswrapper[4975]: E0126 00:12:12.175933 4975 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="518ddce4-70cd-4aca-a096-37237d16dd76" containerName="extract-utilities" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.175991 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" containerName="extract-utilities" Jan 26 00:12:12 crc kubenswrapper[4975]: E0126 00:12:12.176012 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" containerName="extract-content" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.176026 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" containerName="extract-content" Jan 26 00:12:12 crc kubenswrapper[4975]: E0126 00:12:12.176051 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" containerName="extract-utilities" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.176064 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" containerName="extract-utilities" Jan 26 00:12:12 crc kubenswrapper[4975]: E0126 00:12:12.176079 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77eccefb-3716-4e9d-9807-059400c1c934" containerName="extract-content" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.176095 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="77eccefb-3716-4e9d-9807-059400c1c934" containerName="extract-content" Jan 26 00:12:12 crc kubenswrapper[4975]: E0126 00:12:12.176110 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" containerName="extract-content" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.176138 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" containerName="extract-content" Jan 26 00:12:12 crc kubenswrapper[4975]: E0126 00:12:12.176158 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" containerName="registry-server" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.176171 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" containerName="registry-server" Jan 26 00:12:12 crc kubenswrapper[4975]: E0126 00:12:12.176188 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93092266-4d3b-451b-bf92-c033c3f62937" containerName="installer" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.176200 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="93092266-4d3b-451b-bf92-c033c3f62937" containerName="installer" Jan 26 00:12:12 crc kubenswrapper[4975]: E0126 00:12:12.176221 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" containerName="registry-server" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.176234 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" containerName="registry-server" Jan 26 00:12:12 crc kubenswrapper[4975]: E0126 00:12:12.176250 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" containerName="extract-utilities" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.176262 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" containerName="extract-utilities" Jan 26 00:12:12 crc kubenswrapper[4975]: E0126 00:12:12.176279 4975 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="77eccefb-3716-4e9d-9807-059400c1c934" containerName="extract-utilities" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.176292 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="77eccefb-3716-4e9d-9807-059400c1c934" containerName="extract-utilities" Jan 26 00:12:12 crc kubenswrapper[4975]: E0126 00:12:12.176313 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" containerName="extract-content" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.176326 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" containerName="extract-content" Jan 26 00:12:12 crc kubenswrapper[4975]: E0126 00:12:12.176348 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" containerName="registry-server" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.176360 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" containerName="registry-server" Jan 26 00:12:12 crc kubenswrapper[4975]: E0126 00:12:12.176375 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77eccefb-3716-4e9d-9807-059400c1c934" containerName="registry-server" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.176388 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="77eccefb-3716-4e9d-9807-059400c1c934" containerName="registry-server" Jan 26 00:12:12 crc kubenswrapper[4975]: E0126 00:12:12.176416 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" containerName="oauth-openshift" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.176429 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" containerName="oauth-openshift" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.176604 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="77eccefb-3716-4e9d-9807-059400c1c934" containerName="registry-server" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.176630 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="93092266-4d3b-451b-bf92-c033c3f62937" containerName="installer" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.176645 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="d2aae40a-ded6-40de-a541-f22ef90f71e5" containerName="registry-server" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.176671 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="af99b29a-8baa-405f-a1f1-84116ba167a8" containerName="registry-server" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.176689 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="73e63d39-8fb5-4d36-bb64-fdb6cfb6d2e9" containerName="oauth-openshift" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.176703 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="518ddce4-70cd-4aca-a096-37237d16dd76" containerName="registry-server" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.177387 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.181426 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.181454 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.182235 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.182288 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.183702 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.183855 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.184222 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.184999 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.185255 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.185474 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.185531 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.187083 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.197790 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.198552 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.227086 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.229978 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.328725 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-system-router-certs\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " 
pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.328839 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-system-service-ca\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.329125 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-system-session\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.329227 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-system-serving-cert\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.329322 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-user-template-login\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.329436 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tq9z\" (UniqueName: \"kubernetes.io/projected/0efc05fb-4197-45b7-8411-b7453ec9ae00-kube-api-access-8tq9z\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.329514 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0efc05fb-4197-45b7-8411-b7453ec9ae00-audit-policies\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.330199 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.330248 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.330285 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0efc05fb-4197-45b7-8411-b7453ec9ae00-audit-dir\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.330319 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-system-cliconfig\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.330405 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-user-template-error\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.330427 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.330458 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.432818 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0efc05fb-4197-45b7-8411-b7453ec9ae00-audit-policies\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.432908 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tq9z\" (UniqueName: \"kubernetes.io/projected/0efc05fb-4197-45b7-8411-b7453ec9ae00-kube-api-access-8tq9z\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.432968 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.433007 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.433051 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0efc05fb-4197-45b7-8411-b7453ec9ae00-audit-dir\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.433089 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-system-cliconfig\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.433294 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0efc05fb-4197-45b7-8411-b7453ec9ae00-audit-dir\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.433903 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-user-template-error\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.433989 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0efc05fb-4197-45b7-8411-b7453ec9ae00-audit-policies\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.434052 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.434280 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.434480 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-system-router-certs\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.434567 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-system-service-ca\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.434645 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-system-cliconfig\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.434679 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-system-session\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.434778 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-system-serving-cert\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.434859 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-user-template-login\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.435638 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.436591 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-system-service-ca\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.442332 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.442827 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-system-router-certs\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.443398 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.443461 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.443406 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-user-template-login\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.443653 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-system-serving-cert\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.444097 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-system-session\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.445243 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: 
\"kubernetes.io/secret/0efc05fb-4197-45b7-8411-b7453ec9ae00-v4-0-config-user-template-error\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.468767 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tq9z\" (UniqueName: \"kubernetes.io/projected/0efc05fb-4197-45b7-8411-b7453ec9ae00-kube-api-access-8tq9z\") pod \"oauth-openshift-76766fc778-pjjrb\" (UID: \"0efc05fb-4197-45b7-8411-b7453ec9ae00\") " pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.518353 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.531923 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.552607 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.659117 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.814049 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.895562 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 26 00:12:12 crc kubenswrapper[4975]: I0126 00:12:12.908610 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 26 00:12:13 crc kubenswrapper[4975]: I0126 00:12:13.043870 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 26 00:12:13 crc kubenswrapper[4975]: I0126 00:12:13.051701 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 26 00:12:13 crc kubenswrapper[4975]: I0126 00:12:13.106037 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 26 00:12:13 crc kubenswrapper[4975]: I0126 00:12:13.279536 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 26 00:12:13 crc kubenswrapper[4975]: I0126 00:12:13.304857 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 26 00:12:13 crc kubenswrapper[4975]: I0126 00:12:13.416232 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 26 00:12:13 crc kubenswrapper[4975]: I0126 00:12:13.561695 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 26 00:12:13 crc kubenswrapper[4975]: I0126 00:12:13.656070 4975 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 26 00:12:13 crc kubenswrapper[4975]: I0126 00:12:13.670851 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 26 00:12:13 crc kubenswrapper[4975]: I0126 00:12:13.802314 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 26 00:12:13 crc kubenswrapper[4975]: I0126 00:12:13.876612 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 26 00:12:14 crc kubenswrapper[4975]: I0126 00:12:14.011034 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 26 00:12:14 crc kubenswrapper[4975]: I0126 00:12:14.043827 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 26 00:12:14 crc kubenswrapper[4975]: I0126 00:12:14.062416 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 26 00:12:14 crc kubenswrapper[4975]: I0126 00:12:14.148214 4975 scope.go:117] "RemoveContainer" containerID="543d4ced6375b010f6b9f1c3c6be0dc663ef7c138544ba705c09c021e3a13e53" Jan 26 00:12:14 crc kubenswrapper[4975]: I0126 00:12:14.431380 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 26 00:12:14 crc kubenswrapper[4975]: I0126 00:12:14.478865 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 26 00:12:14 crc kubenswrapper[4975]: I0126 00:12:14.857487 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 26 00:12:14 crc kubenswrapper[4975]: I0126 00:12:14.893584 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 26 00:12:14 crc kubenswrapper[4975]: I0126 00:12:14.904796 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 26 00:12:15 crc kubenswrapper[4975]: I0126 00:12:15.012015 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 26 00:12:15 crc kubenswrapper[4975]: I0126 00:12:15.056421 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-8xrbd_fcf19955-9a00-4a50-8ce1-bd7098c45eec/marketplace-operator/1.log" Jan 26 00:12:15 crc kubenswrapper[4975]: I0126 00:12:15.056477 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" event={"ID":"fcf19955-9a00-4a50-8ce1-bd7098c45eec","Type":"ContainerStarted","Data":"8d8caa150f4dee4ecc24bb08934269a86a90b780720026e4c6a6299441b8875e"} Jan 26 00:12:15 crc kubenswrapper[4975]: I0126 00:12:15.056775 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" Jan 26 00:12:15 crc kubenswrapper[4975]: I0126 00:12:15.057624 4975 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-8xrbd container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 
10.217.0.39:8080: connect: connection refused" start-of-body= Jan 26 00:12:15 crc kubenswrapper[4975]: I0126 00:12:15.057701 4975 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" podUID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Jan 26 00:12:15 crc kubenswrapper[4975]: I0126 00:12:15.191974 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 26 00:12:15 crc kubenswrapper[4975]: I0126 00:12:15.255030 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 26 00:12:15 crc kubenswrapper[4975]: I0126 00:12:15.334204 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 26 00:12:15 crc kubenswrapper[4975]: I0126 00:12:15.417459 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 26 00:12:15 crc kubenswrapper[4975]: I0126 00:12:15.521824 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 26 00:12:15 crc kubenswrapper[4975]: I0126 00:12:15.569604 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 26 00:12:15 crc kubenswrapper[4975]: I0126 00:12:15.575802 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 26 00:12:15 crc kubenswrapper[4975]: I0126 00:12:15.745341 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 26 00:12:15 crc kubenswrapper[4975]: I0126 00:12:15.767108 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 26 00:12:15 crc kubenswrapper[4975]: I0126 00:12:15.855469 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 26 00:12:15 crc kubenswrapper[4975]: I0126 00:12:15.874756 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 26 00:12:15 crc kubenswrapper[4975]: I0126 00:12:15.911256 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 26 00:12:15 crc kubenswrapper[4975]: I0126 00:12:15.925693 4975 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 26 00:12:15 crc kubenswrapper[4975]: I0126 00:12:15.926082 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://964ffd6cbcb6021ca062f0252be25c4131c03c66eadbbedfb8d22c196fe95e4a" gracePeriod=5 Jan 26 00:12:15 crc kubenswrapper[4975]: I0126 00:12:15.940286 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 26 00:12:16 crc kubenswrapper[4975]: I0126 00:12:16.021585 4975 reflector.go:368] Caches populated 
for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 26 00:12:16 crc kubenswrapper[4975]: I0126 00:12:16.038005 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 26 00:12:16 crc kubenswrapper[4975]: I0126 00:12:16.063569 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-8xrbd_fcf19955-9a00-4a50-8ce1-bd7098c45eec/marketplace-operator/2.log" Jan 26 00:12:16 crc kubenswrapper[4975]: I0126 00:12:16.064605 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-8xrbd_fcf19955-9a00-4a50-8ce1-bd7098c45eec/marketplace-operator/1.log" Jan 26 00:12:16 crc kubenswrapper[4975]: I0126 00:12:16.064715 4975 generic.go:334] "Generic (PLEG): container finished" podID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" containerID="8d8caa150f4dee4ecc24bb08934269a86a90b780720026e4c6a6299441b8875e" exitCode=1 Jan 26 00:12:16 crc kubenswrapper[4975]: I0126 00:12:16.064806 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" event={"ID":"fcf19955-9a00-4a50-8ce1-bd7098c45eec","Type":"ContainerDied","Data":"8d8caa150f4dee4ecc24bb08934269a86a90b780720026e4c6a6299441b8875e"} Jan 26 00:12:16 crc kubenswrapper[4975]: I0126 00:12:16.064873 4975 scope.go:117] "RemoveContainer" containerID="543d4ced6375b010f6b9f1c3c6be0dc663ef7c138544ba705c09c021e3a13e53" Jan 26 00:12:16 crc kubenswrapper[4975]: I0126 00:12:16.065416 4975 scope.go:117] "RemoveContainer" containerID="8d8caa150f4dee4ecc24bb08934269a86a90b780720026e4c6a6299441b8875e" Jan 26 00:12:16 crc kubenswrapper[4975]: E0126 00:12:16.065660 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 20s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-8xrbd_openshift-marketplace(fcf19955-9a00-4a50-8ce1-bd7098c45eec)\"" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" podUID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" Jan 26 00:12:16 crc kubenswrapper[4975]: I0126 00:12:16.495316 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 26 00:12:16 crc kubenswrapper[4975]: I0126 00:12:16.580209 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 26 00:12:16 crc kubenswrapper[4975]: I0126 00:12:16.759086 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 26 00:12:16 crc kubenswrapper[4975]: I0126 00:12:16.850444 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 26 00:12:16 crc kubenswrapper[4975]: I0126 00:12:16.898726 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 26 00:12:17 crc kubenswrapper[4975]: I0126 00:12:17.058440 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 26 00:12:17 crc kubenswrapper[4975]: I0126 00:12:17.072431 4975 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-8xrbd_fcf19955-9a00-4a50-8ce1-bd7098c45eec/marketplace-operator/2.log" Jan 26 00:12:17 crc kubenswrapper[4975]: I0126 00:12:17.073283 4975 scope.go:117] "RemoveContainer" containerID="8d8caa150f4dee4ecc24bb08934269a86a90b780720026e4c6a6299441b8875e" Jan 26 00:12:17 crc kubenswrapper[4975]: E0126 00:12:17.073704 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 20s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-8xrbd_openshift-marketplace(fcf19955-9a00-4a50-8ce1-bd7098c45eec)\"" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" podUID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" Jan 26 00:12:17 crc kubenswrapper[4975]: I0126 00:12:17.344046 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 26 00:12:17 crc kubenswrapper[4975]: I0126 00:12:17.347089 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-76766fc778-pjjrb"] Jan 26 00:12:17 crc kubenswrapper[4975]: I0126 00:12:17.390489 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 26 00:12:17 crc kubenswrapper[4975]: I0126 00:12:17.417164 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 26 00:12:17 crc kubenswrapper[4975]: I0126 00:12:17.808425 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-76766fc778-pjjrb"] Jan 26 00:12:17 crc kubenswrapper[4975]: I0126 00:12:17.904128 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 26 00:12:17 crc kubenswrapper[4975]: I0126 00:12:17.928438 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 26 00:12:18 crc kubenswrapper[4975]: I0126 00:12:18.002800 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 26 00:12:18 crc kubenswrapper[4975]: I0126 00:12:18.083503 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" event={"ID":"0efc05fb-4197-45b7-8411-b7453ec9ae00","Type":"ContainerStarted","Data":"b10e03ba4f08239ce1d9123d0dfdf8a5068da47d84a60860e5928a91472c0b6d"} Jan 26 00:12:18 crc kubenswrapper[4975]: I0126 00:12:18.238999 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 26 00:12:18 crc kubenswrapper[4975]: I0126 00:12:18.246142 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 26 00:12:18 crc kubenswrapper[4975]: I0126 00:12:18.443591 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 26 00:12:18 crc kubenswrapper[4975]: I0126 00:12:18.595036 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 26 00:12:18 crc kubenswrapper[4975]: I0126 00:12:18.652710 4975 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 26 00:12:18 crc kubenswrapper[4975]: I0126 00:12:18.764272 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 26 00:12:18 crc kubenswrapper[4975]: I0126 00:12:18.769884 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 26 00:12:18 crc kubenswrapper[4975]: I0126 00:12:18.937861 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 26 00:12:19 crc kubenswrapper[4975]: I0126 00:12:19.025859 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 26 00:12:19 crc kubenswrapper[4975]: I0126 00:12:19.099077 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 26 00:12:19 crc kubenswrapper[4975]: I0126 00:12:19.320553 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 26 00:12:19 crc kubenswrapper[4975]: I0126 00:12:19.362579 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 26 00:12:19 crc kubenswrapper[4975]: I0126 00:12:19.691487 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 26 00:12:19 crc kubenswrapper[4975]: I0126 00:12:19.741849 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 26 00:12:19 crc kubenswrapper[4975]: I0126 00:12:19.798448 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 26 00:12:19 crc kubenswrapper[4975]: I0126 00:12:19.840805 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 26 00:12:19 crc kubenswrapper[4975]: I0126 00:12:19.931921 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 26 00:12:19 crc kubenswrapper[4975]: I0126 00:12:19.967558 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 26 00:12:20 crc kubenswrapper[4975]: I0126 00:12:20.098904 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-76766fc778-pjjrb_0efc05fb-4197-45b7-8411-b7453ec9ae00/oauth-openshift/0.log" Jan 26 00:12:20 crc kubenswrapper[4975]: I0126 00:12:20.098955 4975 generic.go:334] "Generic (PLEG): container finished" podID="0efc05fb-4197-45b7-8411-b7453ec9ae00" containerID="61afa891082ee3bfe9ab3db260030af0bed6efc0692cf8bbd3e65e4bdbf4d974" exitCode=255 Jan 26 00:12:20 crc kubenswrapper[4975]: I0126 00:12:20.098987 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" event={"ID":"0efc05fb-4197-45b7-8411-b7453ec9ae00","Type":"ContainerDied","Data":"61afa891082ee3bfe9ab3db260030af0bed6efc0692cf8bbd3e65e4bdbf4d974"} Jan 26 00:12:20 crc kubenswrapper[4975]: I0126 00:12:20.099559 4975 scope.go:117] "RemoveContainer" 
containerID="61afa891082ee3bfe9ab3db260030af0bed6efc0692cf8bbd3e65e4bdbf4d974" Jan 26 00:12:20 crc kubenswrapper[4975]: I0126 00:12:20.145526 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 26 00:12:20 crc kubenswrapper[4975]: I0126 00:12:20.207102 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 26 00:12:20 crc kubenswrapper[4975]: I0126 00:12:20.433235 4975 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 26 00:12:20 crc kubenswrapper[4975]: I0126 00:12:20.468813 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 26 00:12:20 crc kubenswrapper[4975]: I0126 00:12:20.591023 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 26 00:12:20 crc kubenswrapper[4975]: I0126 00:12:20.668999 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 26 00:12:20 crc kubenswrapper[4975]: I0126 00:12:20.679816 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 26 00:12:20 crc kubenswrapper[4975]: I0126 00:12:20.805224 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 26 00:12:20 crc kubenswrapper[4975]: I0126 00:12:20.808414 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.087716 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.107398 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.107473 4975 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="964ffd6cbcb6021ca062f0252be25c4131c03c66eadbbedfb8d22c196fe95e4a" exitCode=137 Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.110017 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-76766fc778-pjjrb_0efc05fb-4197-45b7-8411-b7453ec9ae00/oauth-openshift/1.log" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.110780 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-76766fc778-pjjrb_0efc05fb-4197-45b7-8411-b7453ec9ae00/oauth-openshift/0.log" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.110836 4975 generic.go:334] "Generic (PLEG): container finished" podID="0efc05fb-4197-45b7-8411-b7453ec9ae00" containerID="897c289c732e9ca52821c00ae334b1f21094126bca18d132c90982a5a19f0d37" exitCode=255 Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.110870 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" 
event={"ID":"0efc05fb-4197-45b7-8411-b7453ec9ae00","Type":"ContainerDied","Data":"897c289c732e9ca52821c00ae334b1f21094126bca18d132c90982a5a19f0d37"} Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.110919 4975 scope.go:117] "RemoveContainer" containerID="61afa891082ee3bfe9ab3db260030af0bed6efc0692cf8bbd3e65e4bdbf4d974" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.111767 4975 scope.go:117] "RemoveContainer" containerID="897c289c732e9ca52821c00ae334b1f21094126bca18d132c90982a5a19f0d37" Jan 26 00:12:21 crc kubenswrapper[4975]: E0126 00:12:21.112109 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 10s restarting failed container=oauth-openshift pod=oauth-openshift-76766fc778-pjjrb_openshift-authentication(0efc05fb-4197-45b7-8411-b7453ec9ae00)\"" pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" podUID="0efc05fb-4197-45b7-8411-b7453ec9ae00" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.142206 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.197920 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.251979 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.278301 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.297805 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.369053 4975 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.371543 4975 scope.go:117] "RemoveContainer" containerID="8d8caa150f4dee4ecc24bb08934269a86a90b780720026e4c6a6299441b8875e" Jan 26 00:12:21 crc kubenswrapper[4975]: E0126 00:12:21.371957 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 20s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-8xrbd_openshift-marketplace(fcf19955-9a00-4a50-8ce1-bd7098c45eec)\"" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" podUID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.432698 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.484594 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.615646 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.615800 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.650763 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.815402 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.815470 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.815509 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.815607 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.815627 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.815931 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.815981 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.816063 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.816261 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.825409 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.852202 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.917361 4975 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.917422 4975 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.917431 4975 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.917441 4975 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.917451 4975 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Jan 26 00:12:21 crc kubenswrapper[4975]: I0126 00:12:21.971406 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 26 00:12:22 crc kubenswrapper[4975]: I0126 00:12:22.019954 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 26 00:12:22 crc kubenswrapper[4975]: I0126 00:12:22.119464 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 26 00:12:22 crc kubenswrapper[4975]: I0126 00:12:22.119629 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 26 00:12:22 crc kubenswrapper[4975]: I0126 00:12:22.119632 4975 scope.go:117] "RemoveContainer" containerID="964ffd6cbcb6021ca062f0252be25c4131c03c66eadbbedfb8d22c196fe95e4a" Jan 26 00:12:22 crc kubenswrapper[4975]: I0126 00:12:22.123131 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-76766fc778-pjjrb_0efc05fb-4197-45b7-8411-b7453ec9ae00/oauth-openshift/1.log" Jan 26 00:12:22 crc kubenswrapper[4975]: I0126 00:12:22.123510 4975 scope.go:117] "RemoveContainer" containerID="897c289c732e9ca52821c00ae334b1f21094126bca18d132c90982a5a19f0d37" Jan 26 00:12:22 crc kubenswrapper[4975]: E0126 00:12:22.123887 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 10s restarting failed container=oauth-openshift pod=oauth-openshift-76766fc778-pjjrb_openshift-authentication(0efc05fb-4197-45b7-8411-b7453ec9ae00)\"" pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" podUID="0efc05fb-4197-45b7-8411-b7453ec9ae00" Jan 26 00:12:22 crc kubenswrapper[4975]: I0126 00:12:22.155916 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Jan 26 00:12:22 crc kubenswrapper[4975]: I0126 00:12:22.156197 4975 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Jan 26 00:12:22 crc kubenswrapper[4975]: I0126 00:12:22.156881 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 26 00:12:22 crc kubenswrapper[4975]: I0126 00:12:22.167789 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 26 00:12:22 crc kubenswrapper[4975]: I0126 00:12:22.167842 4975 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="632a5803-ed6a-4945-a32a-608543def2f0" Jan 26 00:12:22 crc kubenswrapper[4975]: I0126 00:12:22.171717 4975 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 26 00:12:22 crc kubenswrapper[4975]: I0126 00:12:22.171803 4975 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="632a5803-ed6a-4945-a32a-608543def2f0" Jan 26 00:12:22 crc kubenswrapper[4975]: I0126 00:12:22.331382 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 26 00:12:22 crc kubenswrapper[4975]: I0126 00:12:22.519195 4975 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:22 crc kubenswrapper[4975]: I0126 00:12:22.519295 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:22 crc kubenswrapper[4975]: I0126 00:12:22.616705 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 26 00:12:22 crc kubenswrapper[4975]: I0126 00:12:22.738309 4975 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 26 00:12:23 crc kubenswrapper[4975]: I0126 00:12:23.059626 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 26 00:12:23 crc kubenswrapper[4975]: I0126 00:12:23.132442 4975 scope.go:117] "RemoveContainer" containerID="897c289c732e9ca52821c00ae334b1f21094126bca18d132c90982a5a19f0d37" Jan 26 00:12:23 crc kubenswrapper[4975]: E0126 00:12:23.132698 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 10s restarting failed container=oauth-openshift pod=oauth-openshift-76766fc778-pjjrb_openshift-authentication(0efc05fb-4197-45b7-8411-b7453ec9ae00)\"" pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" podUID="0efc05fb-4197-45b7-8411-b7453ec9ae00" Jan 26 00:12:23 crc kubenswrapper[4975]: I0126 00:12:23.167754 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 26 00:12:23 crc kubenswrapper[4975]: I0126 00:12:23.310725 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 26 00:12:23 crc kubenswrapper[4975]: I0126 00:12:23.622823 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 26 00:12:24 crc kubenswrapper[4975]: I0126 00:12:24.013453 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 26 00:12:24 crc kubenswrapper[4975]: I0126 00:12:24.026075 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 26 00:12:24 crc kubenswrapper[4975]: I0126 00:12:24.155008 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 26 00:12:24 crc kubenswrapper[4975]: I0126 00:12:24.170609 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 26 00:12:24 crc kubenswrapper[4975]: I0126 00:12:24.251042 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 26 00:12:24 crc kubenswrapper[4975]: I0126 00:12:24.394227 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 26 00:12:24 crc kubenswrapper[4975]: I0126 00:12:24.513773 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 26 00:12:24 crc kubenswrapper[4975]: I0126 00:12:24.613999 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 26 00:12:24 crc kubenswrapper[4975]: I0126 00:12:24.660189 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 26 00:12:24 crc kubenswrapper[4975]: I0126 00:12:24.678820 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 26 00:12:25 crc kubenswrapper[4975]: I0126 00:12:25.248594 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 26 00:12:25 crc kubenswrapper[4975]: I0126 00:12:25.298710 
4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 26 00:12:25 crc kubenswrapper[4975]: I0126 00:12:25.383699 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 26 00:12:25 crc kubenswrapper[4975]: I0126 00:12:25.498425 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 26 00:12:25 crc kubenswrapper[4975]: I0126 00:12:25.529100 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 26 00:12:25 crc kubenswrapper[4975]: I0126 00:12:25.546033 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 26 00:12:25 crc kubenswrapper[4975]: I0126 00:12:25.555070 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 26 00:12:25 crc kubenswrapper[4975]: I0126 00:12:25.604863 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 26 00:12:25 crc kubenswrapper[4975]: I0126 00:12:25.629462 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 26 00:12:25 crc kubenswrapper[4975]: I0126 00:12:25.858564 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 26 00:12:25 crc kubenswrapper[4975]: I0126 00:12:25.890323 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 26 00:12:26 crc kubenswrapper[4975]: I0126 00:12:26.042223 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 26 00:12:26 crc kubenswrapper[4975]: I0126 00:12:26.106975 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 26 00:12:26 crc kubenswrapper[4975]: I0126 00:12:26.415550 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 26 00:12:26 crc kubenswrapper[4975]: I0126 00:12:26.663616 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 26 00:12:26 crc kubenswrapper[4975]: I0126 00:12:26.716434 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 26 00:12:27 crc kubenswrapper[4975]: I0126 00:12:27.260791 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 26 00:12:27 crc kubenswrapper[4975]: I0126 00:12:27.675141 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 26 00:12:27 crc kubenswrapper[4975]: I0126 00:12:27.840022 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 26 00:12:28 crc kubenswrapper[4975]: I0126 00:12:28.188904 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" 
Jan 26 00:12:28 crc kubenswrapper[4975]: I0126 00:12:28.590143 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 26 00:12:28 crc kubenswrapper[4975]: I0126 00:12:28.841749 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 26 00:12:29 crc kubenswrapper[4975]: I0126 00:12:29.150918 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 26 00:12:29 crc kubenswrapper[4975]: I0126 00:12:29.433705 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 26 00:12:30 crc kubenswrapper[4975]: I0126 00:12:30.038776 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 26 00:12:30 crc kubenswrapper[4975]: I0126 00:12:30.730345 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 26 00:12:31 crc kubenswrapper[4975]: I0126 00:12:31.250007 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 26 00:12:31 crc kubenswrapper[4975]: I0126 00:12:31.783199 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 26 00:12:31 crc kubenswrapper[4975]: I0126 00:12:31.877796 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 26 00:12:31 crc kubenswrapper[4975]: I0126 00:12:31.910594 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 26 00:12:32 crc kubenswrapper[4975]: I0126 00:12:32.400522 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 26 00:12:33 crc kubenswrapper[4975]: I0126 00:12:33.860020 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 26 00:12:33 crc kubenswrapper[4975]: I0126 00:12:33.921467 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 26 00:12:34 crc kubenswrapper[4975]: I0126 00:12:34.147879 4975 scope.go:117] "RemoveContainer" containerID="8d8caa150f4dee4ecc24bb08934269a86a90b780720026e4c6a6299441b8875e" Jan 26 00:12:34 crc kubenswrapper[4975]: E0126 00:12:34.148466 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 20s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-8xrbd_openshift-marketplace(fcf19955-9a00-4a50-8ce1-bd7098c45eec)\"" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" podUID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" Jan 26 00:12:34 crc kubenswrapper[4975]: I0126 00:12:34.471388 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 26 00:12:38 crc kubenswrapper[4975]: I0126 00:12:38.146560 4975 scope.go:117] "RemoveContainer" containerID="897c289c732e9ca52821c00ae334b1f21094126bca18d132c90982a5a19f0d37" Jan 26 00:12:39 crc kubenswrapper[4975]: I0126 00:12:39.255989 4975 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-76766fc778-pjjrb_0efc05fb-4197-45b7-8411-b7453ec9ae00/oauth-openshift/1.log" Jan 26 00:12:39 crc kubenswrapper[4975]: I0126 00:12:39.256325 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" event={"ID":"0efc05fb-4197-45b7-8411-b7453ec9ae00","Type":"ContainerStarted","Data":"7473480c3456c45aab7966f354ad5192d5b0ceb3fc5024594b9a962509f73897"} Jan 26 00:12:39 crc kubenswrapper[4975]: I0126 00:12:39.256644 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:39 crc kubenswrapper[4975]: I0126 00:12:39.261477 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" Jan 26 00:12:39 crc kubenswrapper[4975]: I0126 00:12:39.279645 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-76766fc778-pjjrb" podStartSLOduration=109.279624727 podStartE2EDuration="1m49.279624727s" podCreationTimestamp="2026-01-26 00:10:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:12:39.275300948 +0000 UTC m=+343.396506442" watchObservedRunningTime="2026-01-26 00:12:39.279624727 +0000 UTC m=+343.400830231" Jan 26 00:12:40 crc kubenswrapper[4975]: I0126 00:12:40.482095 4975 patch_prober.go:28] interesting pod/machine-config-daemon-f42fk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 00:12:40 crc kubenswrapper[4975]: I0126 00:12:40.482170 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 00:12:45 crc kubenswrapper[4975]: I0126 00:12:45.147601 4975 scope.go:117] "RemoveContainer" containerID="8d8caa150f4dee4ecc24bb08934269a86a90b780720026e4c6a6299441b8875e" Jan 26 00:12:46 crc kubenswrapper[4975]: I0126 00:12:46.301140 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-8xrbd_fcf19955-9a00-4a50-8ce1-bd7098c45eec/marketplace-operator/2.log" Jan 26 00:12:46 crc kubenswrapper[4975]: I0126 00:12:46.301655 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" event={"ID":"fcf19955-9a00-4a50-8ce1-bd7098c45eec","Type":"ContainerStarted","Data":"52e713c25cfad212ae246ddd3aa27a1281ff96d23027dde2c8d64f51208f640c"} Jan 26 00:12:46 crc kubenswrapper[4975]: I0126 00:12:46.302342 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" Jan 26 00:12:46 crc kubenswrapper[4975]: I0126 00:12:46.307583 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" Jan 26 00:13:00 crc kubenswrapper[4975]: I0126 00:13:00.310786 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-controller-manager/controller-manager-879f6c89f-krk6p"] Jan 26 00:13:00 crc kubenswrapper[4975]: I0126 00:13:00.311664 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" podUID="19f40110-25a0-41cb-b740-67d93659b7dc" containerName="controller-manager" containerID="cri-o://15f1ded0751ca1f526d713152d6f74af835e5da8ba7bab0ad6e01eb3b60d2594" gracePeriod=30 Jan 26 00:13:00 crc kubenswrapper[4975]: I0126 00:13:00.421443 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp"] Jan 26 00:13:00 crc kubenswrapper[4975]: I0126 00:13:00.421697 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp" podUID="557733ab-6df8-42b0-893e-a10f05e34f2d" containerName="route-controller-manager" containerID="cri-o://96175440d2d12ca090bf40787b800f4ed116ac14002cd2a76f505e52ee45b81f" gracePeriod=30 Jan 26 00:13:00 crc kubenswrapper[4975]: I0126 00:13:00.710215 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" Jan 26 00:13:00 crc kubenswrapper[4975]: I0126 00:13:00.900440 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19f40110-25a0-41cb-b740-67d93659b7dc-serving-cert\") pod \"19f40110-25a0-41cb-b740-67d93659b7dc\" (UID: \"19f40110-25a0-41cb-b740-67d93659b7dc\") " Jan 26 00:13:00 crc kubenswrapper[4975]: I0126 00:13:00.900537 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/19f40110-25a0-41cb-b740-67d93659b7dc-proxy-ca-bundles\") pod \"19f40110-25a0-41cb-b740-67d93659b7dc\" (UID: \"19f40110-25a0-41cb-b740-67d93659b7dc\") " Jan 26 00:13:00 crc kubenswrapper[4975]: I0126 00:13:00.900572 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/19f40110-25a0-41cb-b740-67d93659b7dc-client-ca\") pod \"19f40110-25a0-41cb-b740-67d93659b7dc\" (UID: \"19f40110-25a0-41cb-b740-67d93659b7dc\") " Jan 26 00:13:00 crc kubenswrapper[4975]: I0126 00:13:00.900608 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19f40110-25a0-41cb-b740-67d93659b7dc-config\") pod \"19f40110-25a0-41cb-b740-67d93659b7dc\" (UID: \"19f40110-25a0-41cb-b740-67d93659b7dc\") " Jan 26 00:13:00 crc kubenswrapper[4975]: I0126 00:13:00.900680 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dg85s\" (UniqueName: \"kubernetes.io/projected/19f40110-25a0-41cb-b740-67d93659b7dc-kube-api-access-dg85s\") pod \"19f40110-25a0-41cb-b740-67d93659b7dc\" (UID: \"19f40110-25a0-41cb-b740-67d93659b7dc\") " Jan 26 00:13:00 crc kubenswrapper[4975]: I0126 00:13:00.901628 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19f40110-25a0-41cb-b740-67d93659b7dc-client-ca" (OuterVolumeSpecName: "client-ca") pod "19f40110-25a0-41cb-b740-67d93659b7dc" (UID: "19f40110-25a0-41cb-b740-67d93659b7dc"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:13:00 crc kubenswrapper[4975]: I0126 00:13:00.901747 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19f40110-25a0-41cb-b740-67d93659b7dc-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "19f40110-25a0-41cb-b740-67d93659b7dc" (UID: "19f40110-25a0-41cb-b740-67d93659b7dc"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:13:00 crc kubenswrapper[4975]: I0126 00:13:00.902505 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19f40110-25a0-41cb-b740-67d93659b7dc-config" (OuterVolumeSpecName: "config") pod "19f40110-25a0-41cb-b740-67d93659b7dc" (UID: "19f40110-25a0-41cb-b740-67d93659b7dc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:13:00 crc kubenswrapper[4975]: I0126 00:13:00.908272 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19f40110-25a0-41cb-b740-67d93659b7dc-kube-api-access-dg85s" (OuterVolumeSpecName: "kube-api-access-dg85s") pod "19f40110-25a0-41cb-b740-67d93659b7dc" (UID: "19f40110-25a0-41cb-b740-67d93659b7dc"). InnerVolumeSpecName "kube-api-access-dg85s". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:13:00 crc kubenswrapper[4975]: I0126 00:13:00.909216 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19f40110-25a0-41cb-b740-67d93659b7dc-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "19f40110-25a0-41cb-b740-67d93659b7dc" (UID: "19f40110-25a0-41cb-b740-67d93659b7dc"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.002145 4975 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/19f40110-25a0-41cb-b740-67d93659b7dc-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.002212 4975 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/19f40110-25a0-41cb-b740-67d93659b7dc-client-ca\") on node \"crc\" DevicePath \"\"" Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.002222 4975 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19f40110-25a0-41cb-b740-67d93659b7dc-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.002233 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dg85s\" (UniqueName: \"kubernetes.io/projected/19f40110-25a0-41cb-b740-67d93659b7dc-kube-api-access-dg85s\") on node \"crc\" DevicePath \"\"" Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.002246 4975 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19f40110-25a0-41cb-b740-67d93659b7dc-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.257973 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp" Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.400061 4975 generic.go:334] "Generic (PLEG): container finished" podID="557733ab-6df8-42b0-893e-a10f05e34f2d" containerID="96175440d2d12ca090bf40787b800f4ed116ac14002cd2a76f505e52ee45b81f" exitCode=0 Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.400129 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp" Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.400147 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp" event={"ID":"557733ab-6df8-42b0-893e-a10f05e34f2d","Type":"ContainerDied","Data":"96175440d2d12ca090bf40787b800f4ed116ac14002cd2a76f505e52ee45b81f"} Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.400179 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp" event={"ID":"557733ab-6df8-42b0-893e-a10f05e34f2d","Type":"ContainerDied","Data":"84910189e1e1311ab6731b789daae59be20e5e03c466e590cded9013b05bf524"} Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.400196 4975 scope.go:117] "RemoveContainer" containerID="96175440d2d12ca090bf40787b800f4ed116ac14002cd2a76f505e52ee45b81f" Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.401639 4975 generic.go:334] "Generic (PLEG): container finished" podID="19f40110-25a0-41cb-b740-67d93659b7dc" containerID="15f1ded0751ca1f526d713152d6f74af835e5da8ba7bab0ad6e01eb3b60d2594" exitCode=0 Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.401666 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" event={"ID":"19f40110-25a0-41cb-b740-67d93659b7dc","Type":"ContainerDied","Data":"15f1ded0751ca1f526d713152d6f74af835e5da8ba7bab0ad6e01eb3b60d2594"} Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.401681 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" event={"ID":"19f40110-25a0-41cb-b740-67d93659b7dc","Type":"ContainerDied","Data":"5654e1e369de0a21f085ebb73818b5497d83f4bc745246fdddaa9a01e89be36f"} Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.401746 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-krk6p" Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.407149 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/557733ab-6df8-42b0-893e-a10f05e34f2d-serving-cert\") pod \"557733ab-6df8-42b0-893e-a10f05e34f2d\" (UID: \"557733ab-6df8-42b0-893e-a10f05e34f2d\") " Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.407250 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/557733ab-6df8-42b0-893e-a10f05e34f2d-client-ca\") pod \"557733ab-6df8-42b0-893e-a10f05e34f2d\" (UID: \"557733ab-6df8-42b0-893e-a10f05e34f2d\") " Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.407311 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/557733ab-6df8-42b0-893e-a10f05e34f2d-config\") pod \"557733ab-6df8-42b0-893e-a10f05e34f2d\" (UID: \"557733ab-6df8-42b0-893e-a10f05e34f2d\") " Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.407383 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwtz2\" (UniqueName: \"kubernetes.io/projected/557733ab-6df8-42b0-893e-a10f05e34f2d-kube-api-access-nwtz2\") pod \"557733ab-6df8-42b0-893e-a10f05e34f2d\" (UID: \"557733ab-6df8-42b0-893e-a10f05e34f2d\") " Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.408162 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/557733ab-6df8-42b0-893e-a10f05e34f2d-client-ca" (OuterVolumeSpecName: "client-ca") pod "557733ab-6df8-42b0-893e-a10f05e34f2d" (UID: "557733ab-6df8-42b0-893e-a10f05e34f2d"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.408206 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/557733ab-6df8-42b0-893e-a10f05e34f2d-config" (OuterVolumeSpecName: "config") pod "557733ab-6df8-42b0-893e-a10f05e34f2d" (UID: "557733ab-6df8-42b0-893e-a10f05e34f2d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.411445 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/557733ab-6df8-42b0-893e-a10f05e34f2d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "557733ab-6df8-42b0-893e-a10f05e34f2d" (UID: "557733ab-6df8-42b0-893e-a10f05e34f2d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.411696 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/557733ab-6df8-42b0-893e-a10f05e34f2d-kube-api-access-nwtz2" (OuterVolumeSpecName: "kube-api-access-nwtz2") pod "557733ab-6df8-42b0-893e-a10f05e34f2d" (UID: "557733ab-6df8-42b0-893e-a10f05e34f2d"). InnerVolumeSpecName "kube-api-access-nwtz2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.416745 4975 scope.go:117] "RemoveContainer" containerID="96175440d2d12ca090bf40787b800f4ed116ac14002cd2a76f505e52ee45b81f" Jan 26 00:13:01 crc kubenswrapper[4975]: E0126 00:13:01.417691 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96175440d2d12ca090bf40787b800f4ed116ac14002cd2a76f505e52ee45b81f\": container with ID starting with 96175440d2d12ca090bf40787b800f4ed116ac14002cd2a76f505e52ee45b81f not found: ID does not exist" containerID="96175440d2d12ca090bf40787b800f4ed116ac14002cd2a76f505e52ee45b81f" Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.417756 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96175440d2d12ca090bf40787b800f4ed116ac14002cd2a76f505e52ee45b81f"} err="failed to get container status \"96175440d2d12ca090bf40787b800f4ed116ac14002cd2a76f505e52ee45b81f\": rpc error: code = NotFound desc = could not find container \"96175440d2d12ca090bf40787b800f4ed116ac14002cd2a76f505e52ee45b81f\": container with ID starting with 96175440d2d12ca090bf40787b800f4ed116ac14002cd2a76f505e52ee45b81f not found: ID does not exist" Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.417792 4975 scope.go:117] "RemoveContainer" containerID="15f1ded0751ca1f526d713152d6f74af835e5da8ba7bab0ad6e01eb3b60d2594" Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.432867 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-krk6p"] Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.435847 4975 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-krk6p"] Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.439338 4975 scope.go:117] "RemoveContainer" containerID="15f1ded0751ca1f526d713152d6f74af835e5da8ba7bab0ad6e01eb3b60d2594" Jan 26 00:13:01 crc kubenswrapper[4975]: E0126 00:13:01.439789 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15f1ded0751ca1f526d713152d6f74af835e5da8ba7bab0ad6e01eb3b60d2594\": container with ID starting with 15f1ded0751ca1f526d713152d6f74af835e5da8ba7bab0ad6e01eb3b60d2594 not found: ID does not exist" containerID="15f1ded0751ca1f526d713152d6f74af835e5da8ba7bab0ad6e01eb3b60d2594" Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.439840 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15f1ded0751ca1f526d713152d6f74af835e5da8ba7bab0ad6e01eb3b60d2594"} err="failed to get container status \"15f1ded0751ca1f526d713152d6f74af835e5da8ba7bab0ad6e01eb3b60d2594\": rpc error: code = NotFound desc = could not find container \"15f1ded0751ca1f526d713152d6f74af835e5da8ba7bab0ad6e01eb3b60d2594\": container with ID starting with 15f1ded0751ca1f526d713152d6f74af835e5da8ba7bab0ad6e01eb3b60d2594 not found: ID does not exist" Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.508665 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nwtz2\" (UniqueName: \"kubernetes.io/projected/557733ab-6df8-42b0-893e-a10f05e34f2d-kube-api-access-nwtz2\") on node \"crc\" DevicePath \"\"" Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.508720 4975 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/557733ab-6df8-42b0-893e-a10f05e34f2d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.508749 4975 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/557733ab-6df8-42b0-893e-a10f05e34f2d-client-ca\") on node \"crc\" DevicePath \"\"" Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.508761 4975 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/557733ab-6df8-42b0-893e-a10f05e34f2d-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.741022 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp"] Jan 26 00:13:01 crc kubenswrapper[4975]: I0126 00:13:01.747133 4975 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-xx2cp"] Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.167097 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19f40110-25a0-41cb-b740-67d93659b7dc" path="/var/lib/kubelet/pods/19f40110-25a0-41cb-b740-67d93659b7dc/volumes" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.168202 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="557733ab-6df8-42b0-893e-a10f05e34f2d" path="/var/lib/kubelet/pods/557733ab-6df8-42b0-893e-a10f05e34f2d/volumes" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.396408 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5bdbdc4497-vkw4m"] Jan 26 00:13:02 crc kubenswrapper[4975]: E0126 00:13:02.397421 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="557733ab-6df8-42b0-893e-a10f05e34f2d" containerName="route-controller-manager" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.397499 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="557733ab-6df8-42b0-893e-a10f05e34f2d" containerName="route-controller-manager" Jan 26 00:13:02 crc kubenswrapper[4975]: E0126 00:13:02.397578 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.397596 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 26 00:13:02 crc kubenswrapper[4975]: E0126 00:13:02.397624 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19f40110-25a0-41cb-b740-67d93659b7dc" containerName="controller-manager" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.397676 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="19f40110-25a0-41cb-b740-67d93659b7dc" containerName="controller-manager" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.398065 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="19f40110-25a0-41cb-b740-67d93659b7dc" containerName="controller-manager" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.398103 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.398139 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="557733ab-6df8-42b0-893e-a10f05e34f2d" containerName="route-controller-manager" Jan 26 00:13:02 crc 
kubenswrapper[4975]: I0126 00:13:02.402095 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5bdbdc4497-vkw4m" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.405781 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7"] Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.406783 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.407864 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.410421 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.410622 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.411669 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.411817 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.413287 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.413658 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.413992 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.414383 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.414835 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5bdbdc4497-vkw4m"] Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.414981 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.415190 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.415637 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.421663 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c-config\") pod \"route-controller-manager-7d99c98c86-6nwl7\" (UID: \"aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c\") " pod="openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 
00:13:02.421770 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7"] Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.421889 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wz5zx\" (UniqueName: \"kubernetes.io/projected/0a77ea3b-b24a-4af7-a0c2-203b82ebc497-kube-api-access-wz5zx\") pod \"controller-manager-5bdbdc4497-vkw4m\" (UID: \"0a77ea3b-b24a-4af7-a0c2-203b82ebc497\") " pod="openshift-controller-manager/controller-manager-5bdbdc4497-vkw4m" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.421967 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0a77ea3b-b24a-4af7-a0c2-203b82ebc497-serving-cert\") pod \"controller-manager-5bdbdc4497-vkw4m\" (UID: \"0a77ea3b-b24a-4af7-a0c2-203b82ebc497\") " pod="openshift-controller-manager/controller-manager-5bdbdc4497-vkw4m" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.422027 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0a77ea3b-b24a-4af7-a0c2-203b82ebc497-client-ca\") pod \"controller-manager-5bdbdc4497-vkw4m\" (UID: \"0a77ea3b-b24a-4af7-a0c2-203b82ebc497\") " pod="openshift-controller-manager/controller-manager-5bdbdc4497-vkw4m" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.422072 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a77ea3b-b24a-4af7-a0c2-203b82ebc497-config\") pod \"controller-manager-5bdbdc4497-vkw4m\" (UID: \"0a77ea3b-b24a-4af7-a0c2-203b82ebc497\") " pod="openshift-controller-manager/controller-manager-5bdbdc4497-vkw4m" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.422341 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzq82\" (UniqueName: \"kubernetes.io/projected/aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c-kube-api-access-nzq82\") pod \"route-controller-manager-7d99c98c86-6nwl7\" (UID: \"aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c\") " pod="openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.422460 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c-serving-cert\") pod \"route-controller-manager-7d99c98c86-6nwl7\" (UID: \"aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c\") " pod="openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.422542 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0a77ea3b-b24a-4af7-a0c2-203b82ebc497-proxy-ca-bundles\") pod \"controller-manager-5bdbdc4497-vkw4m\" (UID: \"0a77ea3b-b24a-4af7-a0c2-203b82ebc497\") " pod="openshift-controller-manager/controller-manager-5bdbdc4497-vkw4m" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.422636 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c-client-ca\") pod 
\"route-controller-manager-7d99c98c86-6nwl7\" (UID: \"aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c\") " pod="openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.424570 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.524385 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c-serving-cert\") pod \"route-controller-manager-7d99c98c86-6nwl7\" (UID: \"aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c\") " pod="openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.524499 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0a77ea3b-b24a-4af7-a0c2-203b82ebc497-proxy-ca-bundles\") pod \"controller-manager-5bdbdc4497-vkw4m\" (UID: \"0a77ea3b-b24a-4af7-a0c2-203b82ebc497\") " pod="openshift-controller-manager/controller-manager-5bdbdc4497-vkw4m" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.524555 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c-client-ca\") pod \"route-controller-manager-7d99c98c86-6nwl7\" (UID: \"aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c\") " pod="openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.524603 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c-config\") pod \"route-controller-manager-7d99c98c86-6nwl7\" (UID: \"aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c\") " pod="openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.524661 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wz5zx\" (UniqueName: \"kubernetes.io/projected/0a77ea3b-b24a-4af7-a0c2-203b82ebc497-kube-api-access-wz5zx\") pod \"controller-manager-5bdbdc4497-vkw4m\" (UID: \"0a77ea3b-b24a-4af7-a0c2-203b82ebc497\") " pod="openshift-controller-manager/controller-manager-5bdbdc4497-vkw4m" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.524718 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0a77ea3b-b24a-4af7-a0c2-203b82ebc497-serving-cert\") pod \"controller-manager-5bdbdc4497-vkw4m\" (UID: \"0a77ea3b-b24a-4af7-a0c2-203b82ebc497\") " pod="openshift-controller-manager/controller-manager-5bdbdc4497-vkw4m" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.524810 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a77ea3b-b24a-4af7-a0c2-203b82ebc497-config\") pod \"controller-manager-5bdbdc4497-vkw4m\" (UID: \"0a77ea3b-b24a-4af7-a0c2-203b82ebc497\") " pod="openshift-controller-manager/controller-manager-5bdbdc4497-vkw4m" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.524860 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/0a77ea3b-b24a-4af7-a0c2-203b82ebc497-client-ca\") pod \"controller-manager-5bdbdc4497-vkw4m\" (UID: \"0a77ea3b-b24a-4af7-a0c2-203b82ebc497\") " pod="openshift-controller-manager/controller-manager-5bdbdc4497-vkw4m" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.524955 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzq82\" (UniqueName: \"kubernetes.io/projected/aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c-kube-api-access-nzq82\") pod \"route-controller-manager-7d99c98c86-6nwl7\" (UID: \"aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c\") " pod="openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.526304 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0a77ea3b-b24a-4af7-a0c2-203b82ebc497-proxy-ca-bundles\") pod \"controller-manager-5bdbdc4497-vkw4m\" (UID: \"0a77ea3b-b24a-4af7-a0c2-203b82ebc497\") " pod="openshift-controller-manager/controller-manager-5bdbdc4497-vkw4m" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.527481 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0a77ea3b-b24a-4af7-a0c2-203b82ebc497-client-ca\") pod \"controller-manager-5bdbdc4497-vkw4m\" (UID: \"0a77ea3b-b24a-4af7-a0c2-203b82ebc497\") " pod="openshift-controller-manager/controller-manager-5bdbdc4497-vkw4m" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.531717 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c-client-ca\") pod \"route-controller-manager-7d99c98c86-6nwl7\" (UID: \"aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c\") " pod="openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.532254 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c-config\") pod \"route-controller-manager-7d99c98c86-6nwl7\" (UID: \"aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c\") " pod="openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.532417 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a77ea3b-b24a-4af7-a0c2-203b82ebc497-config\") pod \"controller-manager-5bdbdc4497-vkw4m\" (UID: \"0a77ea3b-b24a-4af7-a0c2-203b82ebc497\") " pod="openshift-controller-manager/controller-manager-5bdbdc4497-vkw4m" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.536331 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c-serving-cert\") pod \"route-controller-manager-7d99c98c86-6nwl7\" (UID: \"aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c\") " pod="openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.540958 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0a77ea3b-b24a-4af7-a0c2-203b82ebc497-serving-cert\") pod \"controller-manager-5bdbdc4497-vkw4m\" (UID: \"0a77ea3b-b24a-4af7-a0c2-203b82ebc497\") " 
pod="openshift-controller-manager/controller-manager-5bdbdc4497-vkw4m" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.554783 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wz5zx\" (UniqueName: \"kubernetes.io/projected/0a77ea3b-b24a-4af7-a0c2-203b82ebc497-kube-api-access-wz5zx\") pod \"controller-manager-5bdbdc4497-vkw4m\" (UID: \"0a77ea3b-b24a-4af7-a0c2-203b82ebc497\") " pod="openshift-controller-manager/controller-manager-5bdbdc4497-vkw4m" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.560625 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzq82\" (UniqueName: \"kubernetes.io/projected/aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c-kube-api-access-nzq82\") pod \"route-controller-manager-7d99c98c86-6nwl7\" (UID: \"aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c\") " pod="openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.746109 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5bdbdc4497-vkw4m" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.762271 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7" Jan 26 00:13:02 crc kubenswrapper[4975]: I0126 00:13:02.962321 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7"] Jan 26 00:13:03 crc kubenswrapper[4975]: I0126 00:13:03.027843 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5bdbdc4497-vkw4m"] Jan 26 00:13:03 crc kubenswrapper[4975]: W0126 00:13:03.030328 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0a77ea3b_b24a_4af7_a0c2_203b82ebc497.slice/crio-a7b34d2357fb52e677ffef67bd13af6e8781500bef8255a1b9394aba09b05f4a WatchSource:0}: Error finding container a7b34d2357fb52e677ffef67bd13af6e8781500bef8255a1b9394aba09b05f4a: Status 404 returned error can't find the container with id a7b34d2357fb52e677ffef67bd13af6e8781500bef8255a1b9394aba09b05f4a Jan 26 00:13:03 crc kubenswrapper[4975]: I0126 00:13:03.429389 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7" event={"ID":"aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c","Type":"ContainerStarted","Data":"dafa0e7c20c5f0c813940e290e400a282ad18386ee8cfb008e6a7364cce24c03"} Jan 26 00:13:03 crc kubenswrapper[4975]: I0126 00:13:03.431353 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5bdbdc4497-vkw4m" event={"ID":"0a77ea3b-b24a-4af7-a0c2-203b82ebc497","Type":"ContainerStarted","Data":"a7b34d2357fb52e677ffef67bd13af6e8781500bef8255a1b9394aba09b05f4a"} Jan 26 00:13:04 crc kubenswrapper[4975]: I0126 00:13:04.440505 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7" event={"ID":"aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c","Type":"ContainerStarted","Data":"61d4d9a40687aad73d659ea89392d91e23d9ecf47eebee42d38e5b061389caf7"} Jan 26 00:13:04 crc kubenswrapper[4975]: I0126 00:13:04.440894 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7" Jan 26 00:13:04 crc kubenswrapper[4975]: I0126 00:13:04.442976 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5bdbdc4497-vkw4m" event={"ID":"0a77ea3b-b24a-4af7-a0c2-203b82ebc497","Type":"ContainerStarted","Data":"d6e650c0808b9f07453baa3cf0146eb3914ece93361416925a4f76a7cdcf2d21"} Jan 26 00:13:04 crc kubenswrapper[4975]: I0126 00:13:04.443184 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5bdbdc4497-vkw4m" Jan 26 00:13:04 crc kubenswrapper[4975]: I0126 00:13:04.454042 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5bdbdc4497-vkw4m" Jan 26 00:13:04 crc kubenswrapper[4975]: I0126 00:13:04.467516 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7" podStartSLOduration=4.467477877 podStartE2EDuration="4.467477877s" podCreationTimestamp="2026-01-26 00:13:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:13:04.463745634 +0000 UTC m=+368.584951138" watchObservedRunningTime="2026-01-26 00:13:04.467477877 +0000 UTC m=+368.588683381" Jan 26 00:13:04 crc kubenswrapper[4975]: I0126 00:13:04.483477 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5bdbdc4497-vkw4m" podStartSLOduration=4.483440856 podStartE2EDuration="4.483440856s" podCreationTimestamp="2026-01-26 00:13:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:13:04.481314667 +0000 UTC m=+368.602520171" watchObservedRunningTime="2026-01-26 00:13:04.483440856 +0000 UTC m=+368.604646350" Jan 26 00:13:04 crc kubenswrapper[4975]: I0126 00:13:04.658083 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7" Jan 26 00:13:10 crc kubenswrapper[4975]: I0126 00:13:10.481707 4975 patch_prober.go:28] interesting pod/machine-config-daemon-f42fk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 00:13:10 crc kubenswrapper[4975]: I0126 00:13:10.482178 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 00:13:20 crc kubenswrapper[4975]: I0126 00:13:20.285208 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7"] Jan 26 00:13:20 crc kubenswrapper[4975]: I0126 00:13:20.286088 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7" podUID="aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c" containerName="route-controller-manager" 
containerID="cri-o://61d4d9a40687aad73d659ea89392d91e23d9ecf47eebee42d38e5b061389caf7" gracePeriod=30 Jan 26 00:13:20 crc kubenswrapper[4975]: E0126 00:13:20.404298 4975 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaee5f3c8_e861_4fa0_8f7b_7c43e5b4221c.slice/crio-61d4d9a40687aad73d659ea89392d91e23d9ecf47eebee42d38e5b061389caf7.scope\": RecentStats: unable to find data in memory cache]" Jan 26 00:13:22 crc kubenswrapper[4975]: I0126 00:13:22.763177 4975 patch_prober.go:28] interesting pod/route-controller-manager-7d99c98c86-6nwl7 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.60:8443/healthz\": dial tcp 10.217.0.60:8443: connect: connection refused" start-of-body= Jan 26 00:13:22 crc kubenswrapper[4975]: I0126 00:13:22.764964 4975 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7" podUID="aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.60:8443/healthz\": dial tcp 10.217.0.60:8443: connect: connection refused" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.559039 4975 generic.go:334] "Generic (PLEG): container finished" podID="aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c" containerID="61d4d9a40687aad73d659ea89392d91e23d9ecf47eebee42d38e5b061389caf7" exitCode=0 Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.559117 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7" event={"ID":"aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c","Type":"ContainerDied","Data":"61d4d9a40687aad73d659ea89392d91e23d9ecf47eebee42d38e5b061389caf7"} Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.559398 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7" event={"ID":"aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c","Type":"ContainerDied","Data":"dafa0e7c20c5f0c813940e290e400a282ad18386ee8cfb008e6a7364cce24c03"} Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.559417 4975 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dafa0e7c20c5f0c813940e290e400a282ad18386ee8cfb008e6a7364cce24c03" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.567476 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.596292 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5496799c46-mtq7s"] Jan 26 00:13:23 crc kubenswrapper[4975]: E0126 00:13:23.596579 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c" containerName="route-controller-manager" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.596600 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c" containerName="route-controller-manager" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.597012 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c" containerName="route-controller-manager" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.597551 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5496799c46-mtq7s" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.615817 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5496799c46-mtq7s"] Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.671741 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c-client-ca\") pod \"aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c\" (UID: \"aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c\") " Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.672131 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c-config\") pod \"aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c\" (UID: \"aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c\") " Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.672233 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzq82\" (UniqueName: \"kubernetes.io/projected/aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c-kube-api-access-nzq82\") pod \"aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c\" (UID: \"aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c\") " Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.672316 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c-serving-cert\") pod \"aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c\" (UID: \"aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c\") " Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.673029 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c-client-ca" (OuterVolumeSpecName: "client-ca") pod "aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c" (UID: "aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.673167 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c-config" (OuterVolumeSpecName: "config") pod "aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c" (UID: "aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.677462 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c" (UID: "aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.677463 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c-kube-api-access-nzq82" (OuterVolumeSpecName: "kube-api-access-nzq82") pod "aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c" (UID: "aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c"). InnerVolumeSpecName "kube-api-access-nzq82". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.774037 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/58cd1e17-5639-4bdb-b1c6-a11fc4d6af87-serving-cert\") pod \"route-controller-manager-5496799c46-mtq7s\" (UID: \"58cd1e17-5639-4bdb-b1c6-a11fc4d6af87\") " pod="openshift-route-controller-manager/route-controller-manager-5496799c46-mtq7s" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.774987 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/58cd1e17-5639-4bdb-b1c6-a11fc4d6af87-client-ca\") pod \"route-controller-manager-5496799c46-mtq7s\" (UID: \"58cd1e17-5639-4bdb-b1c6-a11fc4d6af87\") " pod="openshift-route-controller-manager/route-controller-manager-5496799c46-mtq7s" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.775177 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58cd1e17-5639-4bdb-b1c6-a11fc4d6af87-config\") pod \"route-controller-manager-5496799c46-mtq7s\" (UID: \"58cd1e17-5639-4bdb-b1c6-a11fc4d6af87\") " pod="openshift-route-controller-manager/route-controller-manager-5496799c46-mtq7s" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.775295 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzgp2\" (UniqueName: \"kubernetes.io/projected/58cd1e17-5639-4bdb-b1c6-a11fc4d6af87-kube-api-access-wzgp2\") pod \"route-controller-manager-5496799c46-mtq7s\" (UID: \"58cd1e17-5639-4bdb-b1c6-a11fc4d6af87\") " pod="openshift-route-controller-manager/route-controller-manager-5496799c46-mtq7s" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.775465 4975 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.775566 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzq82\" (UniqueName: \"kubernetes.io/projected/aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c-kube-api-access-nzq82\") on node \"crc\" DevicePath \"\"" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.775651 4975 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c-serving-cert\") on node \"crc\" DevicePath 
\"\"" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.775751 4975 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c-client-ca\") on node \"crc\" DevicePath \"\"" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.877412 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/58cd1e17-5639-4bdb-b1c6-a11fc4d6af87-client-ca\") pod \"route-controller-manager-5496799c46-mtq7s\" (UID: \"58cd1e17-5639-4bdb-b1c6-a11fc4d6af87\") " pod="openshift-route-controller-manager/route-controller-manager-5496799c46-mtq7s" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.877503 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58cd1e17-5639-4bdb-b1c6-a11fc4d6af87-config\") pod \"route-controller-manager-5496799c46-mtq7s\" (UID: \"58cd1e17-5639-4bdb-b1c6-a11fc4d6af87\") " pod="openshift-route-controller-manager/route-controller-manager-5496799c46-mtq7s" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.877523 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzgp2\" (UniqueName: \"kubernetes.io/projected/58cd1e17-5639-4bdb-b1c6-a11fc4d6af87-kube-api-access-wzgp2\") pod \"route-controller-manager-5496799c46-mtq7s\" (UID: \"58cd1e17-5639-4bdb-b1c6-a11fc4d6af87\") " pod="openshift-route-controller-manager/route-controller-manager-5496799c46-mtq7s" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.877568 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/58cd1e17-5639-4bdb-b1c6-a11fc4d6af87-serving-cert\") pod \"route-controller-manager-5496799c46-mtq7s\" (UID: \"58cd1e17-5639-4bdb-b1c6-a11fc4d6af87\") " pod="openshift-route-controller-manager/route-controller-manager-5496799c46-mtq7s" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.879114 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/58cd1e17-5639-4bdb-b1c6-a11fc4d6af87-client-ca\") pod \"route-controller-manager-5496799c46-mtq7s\" (UID: \"58cd1e17-5639-4bdb-b1c6-a11fc4d6af87\") " pod="openshift-route-controller-manager/route-controller-manager-5496799c46-mtq7s" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.879329 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/58cd1e17-5639-4bdb-b1c6-a11fc4d6af87-config\") pod \"route-controller-manager-5496799c46-mtq7s\" (UID: \"58cd1e17-5639-4bdb-b1c6-a11fc4d6af87\") " pod="openshift-route-controller-manager/route-controller-manager-5496799c46-mtq7s" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.882718 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/58cd1e17-5639-4bdb-b1c6-a11fc4d6af87-serving-cert\") pod \"route-controller-manager-5496799c46-mtq7s\" (UID: \"58cd1e17-5639-4bdb-b1c6-a11fc4d6af87\") " pod="openshift-route-controller-manager/route-controller-manager-5496799c46-mtq7s" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.900105 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzgp2\" (UniqueName: \"kubernetes.io/projected/58cd1e17-5639-4bdb-b1c6-a11fc4d6af87-kube-api-access-wzgp2\") pod 
\"route-controller-manager-5496799c46-mtq7s\" (UID: \"58cd1e17-5639-4bdb-b1c6-a11fc4d6af87\") " pod="openshift-route-controller-manager/route-controller-manager-5496799c46-mtq7s" Jan 26 00:13:23 crc kubenswrapper[4975]: I0126 00:13:23.918983 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5496799c46-mtq7s" Jan 26 00:13:24 crc kubenswrapper[4975]: I0126 00:13:24.347255 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5496799c46-mtq7s"] Jan 26 00:13:24 crc kubenswrapper[4975]: W0126 00:13:24.350347 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod58cd1e17_5639_4bdb_b1c6_a11fc4d6af87.slice/crio-1e0c9687bea8ec989ebf0832a17eedec7d850f1db4f8cf04802cd27239d84835 WatchSource:0}: Error finding container 1e0c9687bea8ec989ebf0832a17eedec7d850f1db4f8cf04802cd27239d84835: Status 404 returned error can't find the container with id 1e0c9687bea8ec989ebf0832a17eedec7d850f1db4f8cf04802cd27239d84835 Jan 26 00:13:24 crc kubenswrapper[4975]: I0126 00:13:24.568133 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7" Jan 26 00:13:24 crc kubenswrapper[4975]: I0126 00:13:24.568169 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5496799c46-mtq7s" event={"ID":"58cd1e17-5639-4bdb-b1c6-a11fc4d6af87","Type":"ContainerStarted","Data":"898c2d470ccfc12bb0b1a337055158893672040e7b14e5558f5c12f20895f079"} Jan 26 00:13:24 crc kubenswrapper[4975]: I0126 00:13:24.568282 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5496799c46-mtq7s" Jan 26 00:13:24 crc kubenswrapper[4975]: I0126 00:13:24.568296 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5496799c46-mtq7s" event={"ID":"58cd1e17-5639-4bdb-b1c6-a11fc4d6af87","Type":"ContainerStarted","Data":"1e0c9687bea8ec989ebf0832a17eedec7d850f1db4f8cf04802cd27239d84835"} Jan 26 00:13:24 crc kubenswrapper[4975]: I0126 00:13:24.590683 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5496799c46-mtq7s" podStartSLOduration=4.590658088 podStartE2EDuration="4.590658088s" podCreationTimestamp="2026-01-26 00:13:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:13:24.586750951 +0000 UTC m=+388.707956435" watchObservedRunningTime="2026-01-26 00:13:24.590658088 +0000 UTC m=+388.711863592" Jan 26 00:13:24 crc kubenswrapper[4975]: I0126 00:13:24.604592 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7"] Jan 26 00:13:24 crc kubenswrapper[4975]: I0126 00:13:24.616718 4975 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d99c98c86-6nwl7"] Jan 26 00:13:24 crc kubenswrapper[4975]: I0126 00:13:24.967097 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5496799c46-mtq7s" Jan 26 00:13:26 crc 
kubenswrapper[4975]: I0126 00:13:26.157486 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c" path="/var/lib/kubelet/pods/aee5f3c8-e861-4fa0-8f7b-7c43e5b4221c/volumes" Jan 26 00:13:40 crc kubenswrapper[4975]: I0126 00:13:40.481753 4975 patch_prober.go:28] interesting pod/machine-config-daemon-f42fk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 00:13:40 crc kubenswrapper[4975]: I0126 00:13:40.482348 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 00:13:40 crc kubenswrapper[4975]: I0126 00:13:40.482415 4975 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" Jan 26 00:13:40 crc kubenswrapper[4975]: I0126 00:13:40.483105 4975 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"652e6479f3dfdb1a3ca63514acfe0bfbaff4a6a65fef79d6dec5db4086c1400e"} pod="openshift-machine-config-operator/machine-config-daemon-f42fk" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 26 00:13:40 crc kubenswrapper[4975]: I0126 00:13:40.483200 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" containerID="cri-o://652e6479f3dfdb1a3ca63514acfe0bfbaff4a6a65fef79d6dec5db4086c1400e" gracePeriod=600 Jan 26 00:13:40 crc kubenswrapper[4975]: I0126 00:13:40.678768 4975 generic.go:334] "Generic (PLEG): container finished" podID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerID="652e6479f3dfdb1a3ca63514acfe0bfbaff4a6a65fef79d6dec5db4086c1400e" exitCode=0 Jan 26 00:13:40 crc kubenswrapper[4975]: I0126 00:13:40.678831 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" event={"ID":"b76c31fb-14ea-4b49-8a41-0b2731967b86","Type":"ContainerDied","Data":"652e6479f3dfdb1a3ca63514acfe0bfbaff4a6a65fef79d6dec5db4086c1400e"} Jan 26 00:13:40 crc kubenswrapper[4975]: I0126 00:13:40.679173 4975 scope.go:117] "RemoveContainer" containerID="33715d487ee4aeed35c838400aca308d5a755198136b8f8bf1c9ba64e48a2283" Jan 26 00:13:41 crc kubenswrapper[4975]: I0126 00:13:41.687898 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" event={"ID":"b76c31fb-14ea-4b49-8a41-0b2731967b86","Type":"ContainerStarted","Data":"cccf552ba56114d4c5f8e6819d6e68fabbcfcb0d932f9e3a0f3f9004cc92de95"} Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.732324 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-jw8lc"] Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.733569 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.746966 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-jw8lc"] Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.861284 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/3d3efa0d-086a-4602-b102-945ab7ec373c-ca-trust-extracted\") pod \"image-registry-66df7c8f76-jw8lc\" (UID: \"3d3efa0d-086a-4602-b102-945ab7ec373c\") " pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.861333 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/3d3efa0d-086a-4602-b102-945ab7ec373c-installation-pull-secrets\") pod \"image-registry-66df7c8f76-jw8lc\" (UID: \"3d3efa0d-086a-4602-b102-945ab7ec373c\") " pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.861356 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/3d3efa0d-086a-4602-b102-945ab7ec373c-registry-certificates\") pod \"image-registry-66df7c8f76-jw8lc\" (UID: \"3d3efa0d-086a-4602-b102-945ab7ec373c\") " pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.861395 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-494jg\" (UniqueName: \"kubernetes.io/projected/3d3efa0d-086a-4602-b102-945ab7ec373c-kube-api-access-494jg\") pod \"image-registry-66df7c8f76-jw8lc\" (UID: \"3d3efa0d-086a-4602-b102-945ab7ec373c\") " pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.861411 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/3d3efa0d-086a-4602-b102-945ab7ec373c-registry-tls\") pod \"image-registry-66df7c8f76-jw8lc\" (UID: \"3d3efa0d-086a-4602-b102-945ab7ec373c\") " pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.861433 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-jw8lc\" (UID: \"3d3efa0d-086a-4602-b102-945ab7ec373c\") " pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.861524 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3d3efa0d-086a-4602-b102-945ab7ec373c-trusted-ca\") pod \"image-registry-66df7c8f76-jw8lc\" (UID: \"3d3efa0d-086a-4602-b102-945ab7ec373c\") " pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.861570 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/3d3efa0d-086a-4602-b102-945ab7ec373c-bound-sa-token\") pod \"image-registry-66df7c8f76-jw8lc\" (UID: \"3d3efa0d-086a-4602-b102-945ab7ec373c\") " pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.882584 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-jw8lc\" (UID: \"3d3efa0d-086a-4602-b102-945ab7ec373c\") " pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.962812 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/3d3efa0d-086a-4602-b102-945ab7ec373c-ca-trust-extracted\") pod \"image-registry-66df7c8f76-jw8lc\" (UID: \"3d3efa0d-086a-4602-b102-945ab7ec373c\") " pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.962872 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/3d3efa0d-086a-4602-b102-945ab7ec373c-installation-pull-secrets\") pod \"image-registry-66df7c8f76-jw8lc\" (UID: \"3d3efa0d-086a-4602-b102-945ab7ec373c\") " pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.962904 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/3d3efa0d-086a-4602-b102-945ab7ec373c-registry-certificates\") pod \"image-registry-66df7c8f76-jw8lc\" (UID: \"3d3efa0d-086a-4602-b102-945ab7ec373c\") " pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.962947 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-494jg\" (UniqueName: \"kubernetes.io/projected/3d3efa0d-086a-4602-b102-945ab7ec373c-kube-api-access-494jg\") pod \"image-registry-66df7c8f76-jw8lc\" (UID: \"3d3efa0d-086a-4602-b102-945ab7ec373c\") " pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.962970 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/3d3efa0d-086a-4602-b102-945ab7ec373c-registry-tls\") pod \"image-registry-66df7c8f76-jw8lc\" (UID: \"3d3efa0d-086a-4602-b102-945ab7ec373c\") " pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.962990 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3d3efa0d-086a-4602-b102-945ab7ec373c-trusted-ca\") pod \"image-registry-66df7c8f76-jw8lc\" (UID: \"3d3efa0d-086a-4602-b102-945ab7ec373c\") " pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.963010 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3d3efa0d-086a-4602-b102-945ab7ec373c-bound-sa-token\") pod \"image-registry-66df7c8f76-jw8lc\" (UID: \"3d3efa0d-086a-4602-b102-945ab7ec373c\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.963394 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/3d3efa0d-086a-4602-b102-945ab7ec373c-ca-trust-extracted\") pod \"image-registry-66df7c8f76-jw8lc\" (UID: \"3d3efa0d-086a-4602-b102-945ab7ec373c\") " pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.964378 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/3d3efa0d-086a-4602-b102-945ab7ec373c-registry-certificates\") pod \"image-registry-66df7c8f76-jw8lc\" (UID: \"3d3efa0d-086a-4602-b102-945ab7ec373c\") " pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.965002 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3d3efa0d-086a-4602-b102-945ab7ec373c-trusted-ca\") pod \"image-registry-66df7c8f76-jw8lc\" (UID: \"3d3efa0d-086a-4602-b102-945ab7ec373c\") " pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.972959 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/3d3efa0d-086a-4602-b102-945ab7ec373c-installation-pull-secrets\") pod \"image-registry-66df7c8f76-jw8lc\" (UID: \"3d3efa0d-086a-4602-b102-945ab7ec373c\") " pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.975492 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/3d3efa0d-086a-4602-b102-945ab7ec373c-registry-tls\") pod \"image-registry-66df7c8f76-jw8lc\" (UID: \"3d3efa0d-086a-4602-b102-945ab7ec373c\") " pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.979138 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3d3efa0d-086a-4602-b102-945ab7ec373c-bound-sa-token\") pod \"image-registry-66df7c8f76-jw8lc\" (UID: \"3d3efa0d-086a-4602-b102-945ab7ec373c\") " pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:01 crc kubenswrapper[4975]: I0126 00:14:01.983204 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-494jg\" (UniqueName: \"kubernetes.io/projected/3d3efa0d-086a-4602-b102-945ab7ec373c-kube-api-access-494jg\") pod \"image-registry-66df7c8f76-jw8lc\" (UID: \"3d3efa0d-086a-4602-b102-945ab7ec373c\") " pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:02 crc kubenswrapper[4975]: I0126 00:14:02.052468 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:02 crc kubenswrapper[4975]: I0126 00:14:02.552753 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-jw8lc"] Jan 26 00:14:02 crc kubenswrapper[4975]: I0126 00:14:02.867843 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" event={"ID":"3d3efa0d-086a-4602-b102-945ab7ec373c","Type":"ContainerStarted","Data":"780dedfcb5139287c95360c6a985881f95ac82ec19902a384b1e46be67f16ba2"} Jan 26 00:14:03 crc kubenswrapper[4975]: I0126 00:14:03.874037 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" event={"ID":"3d3efa0d-086a-4602-b102-945ab7ec373c","Type":"ContainerStarted","Data":"9f16f4b6a16b04227d7a210ac157a30a3bdd668f22742d1438d8ab9b1f615975"} Jan 26 00:14:03 crc kubenswrapper[4975]: I0126 00:14:03.874923 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:03 crc kubenswrapper[4975]: I0126 00:14:03.898392 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" podStartSLOduration=2.898377215 podStartE2EDuration="2.898377215s" podCreationTimestamp="2026-01-26 00:14:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:14:03.894302171 +0000 UTC m=+428.015507695" watchObservedRunningTime="2026-01-26 00:14:03.898377215 +0000 UTC m=+428.019582709" Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.338879 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fccwg"] Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.341417 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fccwg" podUID="27ed45eb-a90d-4bd5-8a17-8988f53407aa" containerName="registry-server" containerID="cri-o://170e622f13df7bbdb96f316809c498ecb5dc65fc6f3f604b9ec297f5a2dd331e" gracePeriod=30 Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.344977 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lx8th"] Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.345245 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-lx8th" podUID="75c99291-b46b-4f76-9922-cd530cca51c9" containerName="registry-server" containerID="cri-o://d1a0c2da1197cd3e91d54bb1f69b1afdd279bc0b3cbe375736fb14a0e00df2cb" gracePeriod=30 Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.371131 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8xrbd"] Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.371503 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" podUID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" containerName="marketplace-operator" containerID="cri-o://52e713c25cfad212ae246ddd3aa27a1281ff96d23027dde2c8d64f51208f640c" gracePeriod=30 Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.384695 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hmnjx"] Jan 
26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.385138 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-hmnjx" podUID="927dc4e6-f4c4-497b-92d6-3218ab0794ac" containerName="registry-server" containerID="cri-o://dc85e71dd13ca42d02ca9ff25cb101f5c429502ae865b29e85a83e004f3f5059" gracePeriod=30 Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.395886 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-kvfbr"] Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.397694 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-kvfbr" Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.402941 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n67bw"] Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.403283 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-n67bw" podUID="034b4c5e-88c6-4dd7-a67d-bd9c23a75192" containerName="registry-server" containerID="cri-o://ffe0da6a1a6506293c1e3556fe17620c5845c3a7e92b70358e5b163b7a1a7ce2" gracePeriod=30 Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.406746 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-kvfbr"] Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.466606 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b31428e9-0e62-40f7-b81f-96e44e63b0b5-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-kvfbr\" (UID: \"b31428e9-0e62-40f7-b81f-96e44e63b0b5\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvfbr" Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.466673 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b31428e9-0e62-40f7-b81f-96e44e63b0b5-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-kvfbr\" (UID: \"b31428e9-0e62-40f7-b81f-96e44e63b0b5\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvfbr" Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.466727 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbsfl\" (UniqueName: \"kubernetes.io/projected/b31428e9-0e62-40f7-b81f-96e44e63b0b5-kube-api-access-rbsfl\") pod \"marketplace-operator-79b997595-kvfbr\" (UID: \"b31428e9-0e62-40f7-b81f-96e44e63b0b5\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvfbr" Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.568316 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b31428e9-0e62-40f7-b81f-96e44e63b0b5-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-kvfbr\" (UID: \"b31428e9-0e62-40f7-b81f-96e44e63b0b5\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvfbr" Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.568377 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b31428e9-0e62-40f7-b81f-96e44e63b0b5-marketplace-operator-metrics\") 
pod \"marketplace-operator-79b997595-kvfbr\" (UID: \"b31428e9-0e62-40f7-b81f-96e44e63b0b5\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvfbr" Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.568413 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbsfl\" (UniqueName: \"kubernetes.io/projected/b31428e9-0e62-40f7-b81f-96e44e63b0b5-kube-api-access-rbsfl\") pod \"marketplace-operator-79b997595-kvfbr\" (UID: \"b31428e9-0e62-40f7-b81f-96e44e63b0b5\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvfbr" Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.570970 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b31428e9-0e62-40f7-b81f-96e44e63b0b5-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-kvfbr\" (UID: \"b31428e9-0e62-40f7-b81f-96e44e63b0b5\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvfbr" Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.575617 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b31428e9-0e62-40f7-b81f-96e44e63b0b5-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-kvfbr\" (UID: \"b31428e9-0e62-40f7-b81f-96e44e63b0b5\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvfbr" Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.585120 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbsfl\" (UniqueName: \"kubernetes.io/projected/b31428e9-0e62-40f7-b81f-96e44e63b0b5-kube-api-access-rbsfl\") pod \"marketplace-operator-79b997595-kvfbr\" (UID: \"b31428e9-0e62-40f7-b81f-96e44e63b0b5\") " pod="openshift-marketplace/marketplace-operator-79b997595-kvfbr" Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.923036 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-kvfbr" Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.927280 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hmnjx" Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.937992 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-8xrbd_fcf19955-9a00-4a50-8ce1-bd7098c45eec/marketplace-operator/2.log" Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.938045 4975 generic.go:334] "Generic (PLEG): container finished" podID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" containerID="52e713c25cfad212ae246ddd3aa27a1281ff96d23027dde2c8d64f51208f640c" exitCode=0 Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.938106 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" event={"ID":"fcf19955-9a00-4a50-8ce1-bd7098c45eec","Type":"ContainerDied","Data":"52e713c25cfad212ae246ddd3aa27a1281ff96d23027dde2c8d64f51208f640c"} Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.938145 4975 scope.go:117] "RemoveContainer" containerID="8d8caa150f4dee4ecc24bb08934269a86a90b780720026e4c6a6299441b8875e" Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.943260 4975 generic.go:334] "Generic (PLEG): container finished" podID="75c99291-b46b-4f76-9922-cd530cca51c9" containerID="d1a0c2da1197cd3e91d54bb1f69b1afdd279bc0b3cbe375736fb14a0e00df2cb" exitCode=0 Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.943342 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lx8th" event={"ID":"75c99291-b46b-4f76-9922-cd530cca51c9","Type":"ContainerDied","Data":"d1a0c2da1197cd3e91d54bb1f69b1afdd279bc0b3cbe375736fb14a0e00df2cb"} Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.945891 4975 generic.go:334] "Generic (PLEG): container finished" podID="927dc4e6-f4c4-497b-92d6-3218ab0794ac" containerID="dc85e71dd13ca42d02ca9ff25cb101f5c429502ae865b29e85a83e004f3f5059" exitCode=0 Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.945944 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hmnjx" event={"ID":"927dc4e6-f4c4-497b-92d6-3218ab0794ac","Type":"ContainerDied","Data":"dc85e71dd13ca42d02ca9ff25cb101f5c429502ae865b29e85a83e004f3f5059"} Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.945965 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hmnjx" event={"ID":"927dc4e6-f4c4-497b-92d6-3218ab0794ac","Type":"ContainerDied","Data":"0646effc2cb331d655d0c8a8444f0f248b74a419c32606dadfe257fe95033c9b"} Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.946030 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hmnjx" Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.966672 4975 generic.go:334] "Generic (PLEG): container finished" podID="034b4c5e-88c6-4dd7-a67d-bd9c23a75192" containerID="ffe0da6a1a6506293c1e3556fe17620c5845c3a7e92b70358e5b163b7a1a7ce2" exitCode=0 Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.967003 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n67bw" event={"ID":"034b4c5e-88c6-4dd7-a67d-bd9c23a75192","Type":"ContainerDied","Data":"ffe0da6a1a6506293c1e3556fe17620c5845c3a7e92b70358e5b163b7a1a7ce2"} Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.974223 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z84m2\" (UniqueName: \"kubernetes.io/projected/927dc4e6-f4c4-497b-92d6-3218ab0794ac-kube-api-access-z84m2\") pod \"927dc4e6-f4c4-497b-92d6-3218ab0794ac\" (UID: \"927dc4e6-f4c4-497b-92d6-3218ab0794ac\") " Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.974420 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/927dc4e6-f4c4-497b-92d6-3218ab0794ac-utilities\") pod \"927dc4e6-f4c4-497b-92d6-3218ab0794ac\" (UID: \"927dc4e6-f4c4-497b-92d6-3218ab0794ac\") " Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.974475 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/927dc4e6-f4c4-497b-92d6-3218ab0794ac-catalog-content\") pod \"927dc4e6-f4c4-497b-92d6-3218ab0794ac\" (UID: \"927dc4e6-f4c4-497b-92d6-3218ab0794ac\") " Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.980990 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/927dc4e6-f4c4-497b-92d6-3218ab0794ac-utilities" (OuterVolumeSpecName: "utilities") pod "927dc4e6-f4c4-497b-92d6-3218ab0794ac" (UID: "927dc4e6-f4c4-497b-92d6-3218ab0794ac"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.981809 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/927dc4e6-f4c4-497b-92d6-3218ab0794ac-kube-api-access-z84m2" (OuterVolumeSpecName: "kube-api-access-z84m2") pod "927dc4e6-f4c4-497b-92d6-3218ab0794ac" (UID: "927dc4e6-f4c4-497b-92d6-3218ab0794ac"). InnerVolumeSpecName "kube-api-access-z84m2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.984168 4975 generic.go:334] "Generic (PLEG): container finished" podID="27ed45eb-a90d-4bd5-8a17-8988f53407aa" containerID="170e622f13df7bbdb96f316809c498ecb5dc65fc6f3f604b9ec297f5a2dd331e" exitCode=0 Jan 26 00:14:13 crc kubenswrapper[4975]: I0126 00:14:13.984235 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fccwg" event={"ID":"27ed45eb-a90d-4bd5-8a17-8988f53407aa","Type":"ContainerDied","Data":"170e622f13df7bbdb96f316809c498ecb5dc65fc6f3f604b9ec297f5a2dd331e"} Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.001793 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/927dc4e6-f4c4-497b-92d6-3218ab0794ac-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "927dc4e6-f4c4-497b-92d6-3218ab0794ac" (UID: "927dc4e6-f4c4-497b-92d6-3218ab0794ac"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.002409 4975 scope.go:117] "RemoveContainer" containerID="dc85e71dd13ca42d02ca9ff25cb101f5c429502ae865b29e85a83e004f3f5059" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.025803 4975 scope.go:117] "RemoveContainer" containerID="288673d59a5898c476b90ebeee1b96738621c584fb415b02d9a920f9daeb78c5" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.043555 4975 scope.go:117] "RemoveContainer" containerID="90d962c1959b09ad9f415a8eb73519e70389f190d3db42055e3a1b54b267a9b5" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.063045 4975 scope.go:117] "RemoveContainer" containerID="dc85e71dd13ca42d02ca9ff25cb101f5c429502ae865b29e85a83e004f3f5059" Jan 26 00:14:14 crc kubenswrapper[4975]: E0126 00:14:14.064031 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc85e71dd13ca42d02ca9ff25cb101f5c429502ae865b29e85a83e004f3f5059\": container with ID starting with dc85e71dd13ca42d02ca9ff25cb101f5c429502ae865b29e85a83e004f3f5059 not found: ID does not exist" containerID="dc85e71dd13ca42d02ca9ff25cb101f5c429502ae865b29e85a83e004f3f5059" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.064059 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc85e71dd13ca42d02ca9ff25cb101f5c429502ae865b29e85a83e004f3f5059"} err="failed to get container status \"dc85e71dd13ca42d02ca9ff25cb101f5c429502ae865b29e85a83e004f3f5059\": rpc error: code = NotFound desc = could not find container \"dc85e71dd13ca42d02ca9ff25cb101f5c429502ae865b29e85a83e004f3f5059\": container with ID starting with dc85e71dd13ca42d02ca9ff25cb101f5c429502ae865b29e85a83e004f3f5059 not found: ID does not exist" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.064080 4975 scope.go:117] "RemoveContainer" containerID="288673d59a5898c476b90ebeee1b96738621c584fb415b02d9a920f9daeb78c5" Jan 26 00:14:14 crc kubenswrapper[4975]: E0126 00:14:14.065272 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"288673d59a5898c476b90ebeee1b96738621c584fb415b02d9a920f9daeb78c5\": container with ID starting with 288673d59a5898c476b90ebeee1b96738621c584fb415b02d9a920f9daeb78c5 not found: ID does not exist" containerID="288673d59a5898c476b90ebeee1b96738621c584fb415b02d9a920f9daeb78c5" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.065393 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"288673d59a5898c476b90ebeee1b96738621c584fb415b02d9a920f9daeb78c5"} err="failed to get container status \"288673d59a5898c476b90ebeee1b96738621c584fb415b02d9a920f9daeb78c5\": rpc error: code = NotFound desc = could not find container \"288673d59a5898c476b90ebeee1b96738621c584fb415b02d9a920f9daeb78c5\": container with ID starting with 288673d59a5898c476b90ebeee1b96738621c584fb415b02d9a920f9daeb78c5 not found: ID does not exist" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.065463 4975 scope.go:117] "RemoveContainer" containerID="90d962c1959b09ad9f415a8eb73519e70389f190d3db42055e3a1b54b267a9b5" Jan 26 00:14:14 crc kubenswrapper[4975]: E0126 00:14:14.065937 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90d962c1959b09ad9f415a8eb73519e70389f190d3db42055e3a1b54b267a9b5\": container 
with ID starting with 90d962c1959b09ad9f415a8eb73519e70389f190d3db42055e3a1b54b267a9b5 not found: ID does not exist" containerID="90d962c1959b09ad9f415a8eb73519e70389f190d3db42055e3a1b54b267a9b5" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.065993 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90d962c1959b09ad9f415a8eb73519e70389f190d3db42055e3a1b54b267a9b5"} err="failed to get container status \"90d962c1959b09ad9f415a8eb73519e70389f190d3db42055e3a1b54b267a9b5\": rpc error: code = NotFound desc = could not find container \"90d962c1959b09ad9f415a8eb73519e70389f190d3db42055e3a1b54b267a9b5\": container with ID starting with 90d962c1959b09ad9f415a8eb73519e70389f190d3db42055e3a1b54b267a9b5 not found: ID does not exist" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.078238 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z84m2\" (UniqueName: \"kubernetes.io/projected/927dc4e6-f4c4-497b-92d6-3218ab0794ac-kube-api-access-z84m2\") on node \"crc\" DevicePath \"\"" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.078281 4975 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/927dc4e6-f4c4-497b-92d6-3218ab0794ac-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.078293 4975 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/927dc4e6-f4c4-497b-92d6-3218ab0794ac-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.267637 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hmnjx"] Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.274506 4975 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-hmnjx"] Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.346177 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lx8th" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.386578 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dsn6v\" (UniqueName: \"kubernetes.io/projected/75c99291-b46b-4f76-9922-cd530cca51c9-kube-api-access-dsn6v\") pod \"75c99291-b46b-4f76-9922-cd530cca51c9\" (UID: \"75c99291-b46b-4f76-9922-cd530cca51c9\") " Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.386697 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/75c99291-b46b-4f76-9922-cd530cca51c9-utilities\") pod \"75c99291-b46b-4f76-9922-cd530cca51c9\" (UID: \"75c99291-b46b-4f76-9922-cd530cca51c9\") " Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.386799 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/75c99291-b46b-4f76-9922-cd530cca51c9-catalog-content\") pod \"75c99291-b46b-4f76-9922-cd530cca51c9\" (UID: \"75c99291-b46b-4f76-9922-cd530cca51c9\") " Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.388939 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/75c99291-b46b-4f76-9922-cd530cca51c9-utilities" (OuterVolumeSpecName: "utilities") pod "75c99291-b46b-4f76-9922-cd530cca51c9" (UID: "75c99291-b46b-4f76-9922-cd530cca51c9"). 
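
The "ContainerStatus from runtime service failed ... not found" errors above are logged at error level but are effectively benign here: the containers being queried were removed a moment earlier, so "not found" means the desired end state already holds and the kubelet simply records it and moves on. A tiny illustration of that idempotent delete-then-check pattern, with a hypothetical in-memory store standing in for the runtime:

package main

import (
	"errors"
	"fmt"
)

var errNotFound = errors.New("container not found")

// store is a hypothetical in-memory stand-in for the container runtime.
type store map[string]bool

func (s store) remove(id string) {
	delete(s, id) // deleting a missing key is already a no-op
}

func (s store) status(id string) error {
	if !s[id] {
		return fmt.Errorf("ContainerStatus %q: %w", id, errNotFound)
	}
	return nil
}

func main() {
	s := store{"dc85e71dd13c": true}
	s.remove("dc85e71dd13c")

	// A follow-up status query now fails with NotFound; the caller logs the
	// error and moves on rather than retrying, because the goal (container
	// gone) already holds.
	if err := s.status("dc85e71dd13c"); errors.Is(err, errNotFound) {
		fmt.Println("already removed:", err)
	}
}
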
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.393525 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75c99291-b46b-4f76-9922-cd530cca51c9-kube-api-access-dsn6v" (OuterVolumeSpecName: "kube-api-access-dsn6v") pod "75c99291-b46b-4f76-9922-cd530cca51c9" (UID: "75c99291-b46b-4f76-9922-cd530cca51c9"). InnerVolumeSpecName "kube-api-access-dsn6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.433362 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fccwg" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.442259 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n67bw" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.446772 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.460281 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/75c99291-b46b-4f76-9922-cd530cca51c9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "75c99291-b46b-4f76-9922-cd530cca51c9" (UID: "75c99291-b46b-4f76-9922-cd530cca51c9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.495301 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fcf19955-9a00-4a50-8ce1-bd7098c45eec-marketplace-operator-metrics\") pod \"fcf19955-9a00-4a50-8ce1-bd7098c45eec\" (UID: \"fcf19955-9a00-4a50-8ce1-bd7098c45eec\") " Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.495389 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fcf19955-9a00-4a50-8ce1-bd7098c45eec-marketplace-trusted-ca\") pod \"fcf19955-9a00-4a50-8ce1-bd7098c45eec\" (UID: \"fcf19955-9a00-4a50-8ce1-bd7098c45eec\") " Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.495420 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27ed45eb-a90d-4bd5-8a17-8988f53407aa-utilities\") pod \"27ed45eb-a90d-4bd5-8a17-8988f53407aa\" (UID: \"27ed45eb-a90d-4bd5-8a17-8988f53407aa\") " Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.495488 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t75h8\" (UniqueName: \"kubernetes.io/projected/fcf19955-9a00-4a50-8ce1-bd7098c45eec-kube-api-access-t75h8\") pod \"fcf19955-9a00-4a50-8ce1-bd7098c45eec\" (UID: \"fcf19955-9a00-4a50-8ce1-bd7098c45eec\") " Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.496573 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/034b4c5e-88c6-4dd7-a67d-bd9c23a75192-utilities\") pod \"034b4c5e-88c6-4dd7-a67d-bd9c23a75192\" (UID: \"034b4c5e-88c6-4dd7-a67d-bd9c23a75192\") " Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.496629 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27ed45eb-a90d-4bd5-8a17-8988f53407aa-catalog-content\") pod \"27ed45eb-a90d-4bd5-8a17-8988f53407aa\" (UID: \"27ed45eb-a90d-4bd5-8a17-8988f53407aa\") " Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.496652 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7fsqr\" (UniqueName: \"kubernetes.io/projected/27ed45eb-a90d-4bd5-8a17-8988f53407aa-kube-api-access-7fsqr\") pod \"27ed45eb-a90d-4bd5-8a17-8988f53407aa\" (UID: \"27ed45eb-a90d-4bd5-8a17-8988f53407aa\") " Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.496677 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/034b4c5e-88c6-4dd7-a67d-bd9c23a75192-catalog-content\") pod \"034b4c5e-88c6-4dd7-a67d-bd9c23a75192\" (UID: \"034b4c5e-88c6-4dd7-a67d-bd9c23a75192\") " Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.496768 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbtcw\" (UniqueName: \"kubernetes.io/projected/034b4c5e-88c6-4dd7-a67d-bd9c23a75192-kube-api-access-jbtcw\") pod \"034b4c5e-88c6-4dd7-a67d-bd9c23a75192\" (UID: \"034b4c5e-88c6-4dd7-a67d-bd9c23a75192\") " Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.497090 4975 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/75c99291-b46b-4f76-9922-cd530cca51c9-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.497111 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dsn6v\" (UniqueName: \"kubernetes.io/projected/75c99291-b46b-4f76-9922-cd530cca51c9-kube-api-access-dsn6v\") on node \"crc\" DevicePath \"\"" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.497128 4975 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/75c99291-b46b-4f76-9922-cd530cca51c9-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.521354 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/034b4c5e-88c6-4dd7-a67d-bd9c23a75192-kube-api-access-jbtcw" (OuterVolumeSpecName: "kube-api-access-jbtcw") pod "034b4c5e-88c6-4dd7-a67d-bd9c23a75192" (UID: "034b4c5e-88c6-4dd7-a67d-bd9c23a75192"). InnerVolumeSpecName "kube-api-access-jbtcw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.528417 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcf19955-9a00-4a50-8ce1-bd7098c45eec-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "fcf19955-9a00-4a50-8ce1-bd7098c45eec" (UID: "fcf19955-9a00-4a50-8ce1-bd7098c45eec"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.534685 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27ed45eb-a90d-4bd5-8a17-8988f53407aa-kube-api-access-7fsqr" (OuterVolumeSpecName: "kube-api-access-7fsqr") pod "27ed45eb-a90d-4bd5-8a17-8988f53407aa" (UID: "27ed45eb-a90d-4bd5-8a17-8988f53407aa"). InnerVolumeSpecName "kube-api-access-7fsqr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.538828 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27ed45eb-a90d-4bd5-8a17-8988f53407aa-utilities" (OuterVolumeSpecName: "utilities") pod "27ed45eb-a90d-4bd5-8a17-8988f53407aa" (UID: "27ed45eb-a90d-4bd5-8a17-8988f53407aa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.542225 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fcf19955-9a00-4a50-8ce1-bd7098c45eec-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "fcf19955-9a00-4a50-8ce1-bd7098c45eec" (UID: "fcf19955-9a00-4a50-8ce1-bd7098c45eec"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.544706 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/034b4c5e-88c6-4dd7-a67d-bd9c23a75192-utilities" (OuterVolumeSpecName: "utilities") pod "034b4c5e-88c6-4dd7-a67d-bd9c23a75192" (UID: "034b4c5e-88c6-4dd7-a67d-bd9c23a75192"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.553830 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-kvfbr"] Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.579406 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcf19955-9a00-4a50-8ce1-bd7098c45eec-kube-api-access-t75h8" (OuterVolumeSpecName: "kube-api-access-t75h8") pod "fcf19955-9a00-4a50-8ce1-bd7098c45eec" (UID: "fcf19955-9a00-4a50-8ce1-bd7098c45eec"). InnerVolumeSpecName "kube-api-access-t75h8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.607707 4975 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fcf19955-9a00-4a50-8ce1-bd7098c45eec-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.607747 4975 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fcf19955-9a00-4a50-8ce1-bd7098c45eec-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.607764 4975 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27ed45eb-a90d-4bd5-8a17-8988f53407aa-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.607774 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t75h8\" (UniqueName: \"kubernetes.io/projected/fcf19955-9a00-4a50-8ce1-bd7098c45eec-kube-api-access-t75h8\") on node \"crc\" DevicePath \"\"" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.607783 4975 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/034b4c5e-88c6-4dd7-a67d-bd9c23a75192-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.607793 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7fsqr\" (UniqueName: \"kubernetes.io/projected/27ed45eb-a90d-4bd5-8a17-8988f53407aa-kube-api-access-7fsqr\") on node \"crc\" DevicePath \"\"" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.607802 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbtcw\" (UniqueName: \"kubernetes.io/projected/034b4c5e-88c6-4dd7-a67d-bd9c23a75192-kube-api-access-jbtcw\") on node \"crc\" DevicePath \"\"" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.616398 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27ed45eb-a90d-4bd5-8a17-8988f53407aa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "27ed45eb-a90d-4bd5-8a17-8988f53407aa" (UID: "27ed45eb-a90d-4bd5-8a17-8988f53407aa"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.710883 4975 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27ed45eb-a90d-4bd5-8a17-8988f53407aa-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.769388 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/034b4c5e-88c6-4dd7-a67d-bd9c23a75192-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "034b4c5e-88c6-4dd7-a67d-bd9c23a75192" (UID: "034b4c5e-88c6-4dd7-a67d-bd9c23a75192"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.814360 4975 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/034b4c5e-88c6-4dd7-a67d-bd9c23a75192-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.993243 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-n67bw" event={"ID":"034b4c5e-88c6-4dd7-a67d-bd9c23a75192","Type":"ContainerDied","Data":"be4ffe6cdcf63f21b5c62a03ef992110851f5c38ca8dac5072681a1b69361b70"} Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.993305 4975 scope.go:117] "RemoveContainer" containerID="ffe0da6a1a6506293c1e3556fe17620c5845c3a7e92b70358e5b163b7a1a7ce2" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.993371 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-n67bw" Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.996246 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fccwg" event={"ID":"27ed45eb-a90d-4bd5-8a17-8988f53407aa","Type":"ContainerDied","Data":"50cbeb4d625c525ecf5b6ce724e964ab10e1260dd51ff5f92359b3334df4c35f"} Jan 26 00:14:14 crc kubenswrapper[4975]: I0126 00:14:14.996309 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fccwg" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.002400 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" event={"ID":"fcf19955-9a00-4a50-8ce1-bd7098c45eec","Type":"ContainerDied","Data":"1a8578ee6d37e1a4aa9fc7e446a0cf36d54c302ea777b2f258d01df9148992c2"} Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.002460 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8xrbd" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.004683 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lx8th" event={"ID":"75c99291-b46b-4f76-9922-cd530cca51c9","Type":"ContainerDied","Data":"6d49219568d933cec9f451c107dda2ccd2dc2bc13f18ee02e85720676baedfbb"} Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.005032 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lx8th" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.006541 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-kvfbr" event={"ID":"b31428e9-0e62-40f7-b81f-96e44e63b0b5","Type":"ContainerStarted","Data":"e1cfde2f7357e0e3b8567dd089885426145976f9a6b25fd1cd291fd51d9cfbba"} Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.006572 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-kvfbr" event={"ID":"b31428e9-0e62-40f7-b81f-96e44e63b0b5","Type":"ContainerStarted","Data":"c7ba40ca96cf00a8d5adf3d59fe6b86afef679e8a1055cf075b4033f32caf1ce"} Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.007095 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-kvfbr" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.009302 4975 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-kvfbr container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.63:8080/healthz\": dial tcp 10.217.0.63:8080: connect: connection refused" start-of-body= Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.009361 4975 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-kvfbr" podUID="b31428e9-0e62-40f7-b81f-96e44e63b0b5" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.63:8080/healthz\": dial tcp 10.217.0.63:8080: connect: connection refused" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.011781 4975 scope.go:117] "RemoveContainer" containerID="65ffe73db50818555831dbbefa69b6c19645067dadd859e1c23c3c95cbc6814b" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.030495 4975 scope.go:117] "RemoveContainer" containerID="a7017a6f1d2aa42698072decc1aa3f9dfe2c7e17a5347ef266baa53302756de4" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.033481 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-kvfbr" podStartSLOduration=2.033441712 podStartE2EDuration="2.033441712s" podCreationTimestamp="2026-01-26 00:14:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:14:15.025808498 +0000 UTC m=+439.147013992" watchObservedRunningTime="2026-01-26 00:14:15.033441712 +0000 UTC m=+439.154647226" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.048847 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-n67bw"] Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.053500 4975 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-n67bw"] Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.059853 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8xrbd"] Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.060868 4975 scope.go:117] "RemoveContainer" containerID="170e622f13df7bbdb96f316809c498ecb5dc65fc6f3f604b9ec297f5a2dd331e" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.063597 4975 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8xrbd"] Jan 26 00:14:15 crc 
kubenswrapper[4975]: I0126 00:14:15.079175 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lx8th"] Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.086205 4975 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-lx8th"] Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.087285 4975 scope.go:117] "RemoveContainer" containerID="dc1c15f4a9f17edca85ab8e22a42d0ee9e039a584113f28e911956da87b4cbdf" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.095523 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fccwg"] Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.100572 4975 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fccwg"] Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.101751 4975 scope.go:117] "RemoveContainer" containerID="517d5130a369efec7ffbbbc8cfac6d33522ca9fddc5bed3d3c70859ee4c6744c" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.117531 4975 scope.go:117] "RemoveContainer" containerID="52e713c25cfad212ae246ddd3aa27a1281ff96d23027dde2c8d64f51208f640c" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.132459 4975 scope.go:117] "RemoveContainer" containerID="d1a0c2da1197cd3e91d54bb1f69b1afdd279bc0b3cbe375736fb14a0e00df2cb" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.156990 4975 scope.go:117] "RemoveContainer" containerID="17384369c8e019fbf81142ad4b744e8e76ad7cfe322098de76f9b4ebd6c716a9" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.174641 4975 scope.go:117] "RemoveContainer" containerID="23615b64c47ff1d74cf4860b0afb9efb0da12db5d302ae18ad5e0d58666a3ed9" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.561071 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-dfpjb"] Jan 26 00:14:15 crc kubenswrapper[4975]: E0126 00:14:15.563958 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27ed45eb-a90d-4bd5-8a17-8988f53407aa" containerName="extract-content" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.564156 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="27ed45eb-a90d-4bd5-8a17-8988f53407aa" containerName="extract-content" Jan 26 00:14:15 crc kubenswrapper[4975]: E0126 00:14:15.564299 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="927dc4e6-f4c4-497b-92d6-3218ab0794ac" containerName="extract-utilities" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.564430 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="927dc4e6-f4c4-497b-92d6-3218ab0794ac" containerName="extract-utilities" Jan 26 00:14:15 crc kubenswrapper[4975]: E0126 00:14:15.564637 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" containerName="marketplace-operator" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.564845 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" containerName="marketplace-operator" Jan 26 00:14:15 crc kubenswrapper[4975]: E0126 00:14:15.565040 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" containerName="marketplace-operator" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.565229 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" containerName="marketplace-operator" Jan 26 00:14:15 crc 
kubenswrapper[4975]: E0126 00:14:15.565392 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="034b4c5e-88c6-4dd7-a67d-bd9c23a75192" containerName="registry-server" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.565545 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="034b4c5e-88c6-4dd7-a67d-bd9c23a75192" containerName="registry-server" Jan 26 00:14:15 crc kubenswrapper[4975]: E0126 00:14:15.565790 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" containerName="marketplace-operator" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.565951 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" containerName="marketplace-operator" Jan 26 00:14:15 crc kubenswrapper[4975]: E0126 00:14:15.566097 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="034b4c5e-88c6-4dd7-a67d-bd9c23a75192" containerName="extract-content" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.566248 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="034b4c5e-88c6-4dd7-a67d-bd9c23a75192" containerName="extract-content" Jan 26 00:14:15 crc kubenswrapper[4975]: E0126 00:14:15.566400 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75c99291-b46b-4f76-9922-cd530cca51c9" containerName="registry-server" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.566560 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="75c99291-b46b-4f76-9922-cd530cca51c9" containerName="registry-server" Jan 26 00:14:15 crc kubenswrapper[4975]: E0126 00:14:15.566722 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75c99291-b46b-4f76-9922-cd530cca51c9" containerName="extract-utilities" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.567183 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="75c99291-b46b-4f76-9922-cd530cca51c9" containerName="extract-utilities" Jan 26 00:14:15 crc kubenswrapper[4975]: E0126 00:14:15.567377 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75c99291-b46b-4f76-9922-cd530cca51c9" containerName="extract-content" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.567550 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="75c99291-b46b-4f76-9922-cd530cca51c9" containerName="extract-content" Jan 26 00:14:15 crc kubenswrapper[4975]: E0126 00:14:15.567713 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="927dc4e6-f4c4-497b-92d6-3218ab0794ac" containerName="extract-content" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.567966 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="927dc4e6-f4c4-497b-92d6-3218ab0794ac" containerName="extract-content" Jan 26 00:14:15 crc kubenswrapper[4975]: E0126 00:14:15.568154 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="927dc4e6-f4c4-497b-92d6-3218ab0794ac" containerName="registry-server" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.568305 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="927dc4e6-f4c4-497b-92d6-3218ab0794ac" containerName="registry-server" Jan 26 00:14:15 crc kubenswrapper[4975]: E0126 00:14:15.568466 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="034b4c5e-88c6-4dd7-a67d-bd9c23a75192" containerName="extract-utilities" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.568623 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="034b4c5e-88c6-4dd7-a67d-bd9c23a75192" 
containerName="extract-utilities" Jan 26 00:14:15 crc kubenswrapper[4975]: E0126 00:14:15.568836 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27ed45eb-a90d-4bd5-8a17-8988f53407aa" containerName="extract-utilities" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.569025 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="27ed45eb-a90d-4bd5-8a17-8988f53407aa" containerName="extract-utilities" Jan 26 00:14:15 crc kubenswrapper[4975]: E0126 00:14:15.569202 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27ed45eb-a90d-4bd5-8a17-8988f53407aa" containerName="registry-server" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.569382 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="27ed45eb-a90d-4bd5-8a17-8988f53407aa" containerName="registry-server" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.569854 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="034b4c5e-88c6-4dd7-a67d-bd9c23a75192" containerName="registry-server" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.570064 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" containerName="marketplace-operator" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.570242 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" containerName="marketplace-operator" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.570443 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="75c99291-b46b-4f76-9922-cd530cca51c9" containerName="registry-server" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.570631 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" containerName="marketplace-operator" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.570868 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="927dc4e6-f4c4-497b-92d6-3218ab0794ac" containerName="registry-server" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.571060 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="27ed45eb-a90d-4bd5-8a17-8988f53407aa" containerName="registry-server" Jan 26 00:14:15 crc kubenswrapper[4975]: E0126 00:14:15.571462 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" containerName="marketplace-operator" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.571658 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" containerName="marketplace-operator" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.572015 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" containerName="marketplace-operator" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.573862 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dfpjb" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.578473 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.582351 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dfpjb"] Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.628582 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69d5ae7e-3538-4c88-a51c-da93562ba9f8-utilities\") pod \"redhat-marketplace-dfpjb\" (UID: \"69d5ae7e-3538-4c88-a51c-da93562ba9f8\") " pod="openshift-marketplace/redhat-marketplace-dfpjb" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.628685 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xplfl\" (UniqueName: \"kubernetes.io/projected/69d5ae7e-3538-4c88-a51c-da93562ba9f8-kube-api-access-xplfl\") pod \"redhat-marketplace-dfpjb\" (UID: \"69d5ae7e-3538-4c88-a51c-da93562ba9f8\") " pod="openshift-marketplace/redhat-marketplace-dfpjb" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.628927 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69d5ae7e-3538-4c88-a51c-da93562ba9f8-catalog-content\") pod \"redhat-marketplace-dfpjb\" (UID: \"69d5ae7e-3538-4c88-a51c-da93562ba9f8\") " pod="openshift-marketplace/redhat-marketplace-dfpjb" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.730642 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xplfl\" (UniqueName: \"kubernetes.io/projected/69d5ae7e-3538-4c88-a51c-da93562ba9f8-kube-api-access-xplfl\") pod \"redhat-marketplace-dfpjb\" (UID: \"69d5ae7e-3538-4c88-a51c-da93562ba9f8\") " pod="openshift-marketplace/redhat-marketplace-dfpjb" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.730806 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69d5ae7e-3538-4c88-a51c-da93562ba9f8-catalog-content\") pod \"redhat-marketplace-dfpjb\" (UID: \"69d5ae7e-3538-4c88-a51c-da93562ba9f8\") " pod="openshift-marketplace/redhat-marketplace-dfpjb" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.730877 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69d5ae7e-3538-4c88-a51c-da93562ba9f8-utilities\") pod \"redhat-marketplace-dfpjb\" (UID: \"69d5ae7e-3538-4c88-a51c-da93562ba9f8\") " pod="openshift-marketplace/redhat-marketplace-dfpjb" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.731491 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69d5ae7e-3538-4c88-a51c-da93562ba9f8-utilities\") pod \"redhat-marketplace-dfpjb\" (UID: \"69d5ae7e-3538-4c88-a51c-da93562ba9f8\") " pod="openshift-marketplace/redhat-marketplace-dfpjb" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.731854 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69d5ae7e-3538-4c88-a51c-da93562ba9f8-catalog-content\") pod \"redhat-marketplace-dfpjb\" (UID: 
\"69d5ae7e-3538-4c88-a51c-da93562ba9f8\") " pod="openshift-marketplace/redhat-marketplace-dfpjb" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.760142 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xplfl\" (UniqueName: \"kubernetes.io/projected/69d5ae7e-3538-4c88-a51c-da93562ba9f8-kube-api-access-xplfl\") pod \"redhat-marketplace-dfpjb\" (UID: \"69d5ae7e-3538-4c88-a51c-da93562ba9f8\") " pod="openshift-marketplace/redhat-marketplace-dfpjb" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.763496 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9xnqv"] Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.764511 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9xnqv" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.766777 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.772159 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9xnqv"] Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.832044 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/148781fe-d5ca-4956-822b-0bf9b8ba18d2-catalog-content\") pod \"redhat-operators-9xnqv\" (UID: \"148781fe-d5ca-4956-822b-0bf9b8ba18d2\") " pod="openshift-marketplace/redhat-operators-9xnqv" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.832108 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/148781fe-d5ca-4956-822b-0bf9b8ba18d2-utilities\") pod \"redhat-operators-9xnqv\" (UID: \"148781fe-d5ca-4956-822b-0bf9b8ba18d2\") " pod="openshift-marketplace/redhat-operators-9xnqv" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.832303 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7x2s\" (UniqueName: \"kubernetes.io/projected/148781fe-d5ca-4956-822b-0bf9b8ba18d2-kube-api-access-l7x2s\") pod \"redhat-operators-9xnqv\" (UID: \"148781fe-d5ca-4956-822b-0bf9b8ba18d2\") " pod="openshift-marketplace/redhat-operators-9xnqv" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.895612 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dfpjb" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.934502 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7x2s\" (UniqueName: \"kubernetes.io/projected/148781fe-d5ca-4956-822b-0bf9b8ba18d2-kube-api-access-l7x2s\") pod \"redhat-operators-9xnqv\" (UID: \"148781fe-d5ca-4956-822b-0bf9b8ba18d2\") " pod="openshift-marketplace/redhat-operators-9xnqv" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.934584 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/148781fe-d5ca-4956-822b-0bf9b8ba18d2-catalog-content\") pod \"redhat-operators-9xnqv\" (UID: \"148781fe-d5ca-4956-822b-0bf9b8ba18d2\") " pod="openshift-marketplace/redhat-operators-9xnqv" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.934627 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/148781fe-d5ca-4956-822b-0bf9b8ba18d2-utilities\") pod \"redhat-operators-9xnqv\" (UID: \"148781fe-d5ca-4956-822b-0bf9b8ba18d2\") " pod="openshift-marketplace/redhat-operators-9xnqv" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.935108 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/148781fe-d5ca-4956-822b-0bf9b8ba18d2-utilities\") pod \"redhat-operators-9xnqv\" (UID: \"148781fe-d5ca-4956-822b-0bf9b8ba18d2\") " pod="openshift-marketplace/redhat-operators-9xnqv" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.935533 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/148781fe-d5ca-4956-822b-0bf9b8ba18d2-catalog-content\") pod \"redhat-operators-9xnqv\" (UID: \"148781fe-d5ca-4956-822b-0bf9b8ba18d2\") " pod="openshift-marketplace/redhat-operators-9xnqv" Jan 26 00:14:15 crc kubenswrapper[4975]: I0126 00:14:15.954503 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7x2s\" (UniqueName: \"kubernetes.io/projected/148781fe-d5ca-4956-822b-0bf9b8ba18d2-kube-api-access-l7x2s\") pod \"redhat-operators-9xnqv\" (UID: \"148781fe-d5ca-4956-822b-0bf9b8ba18d2\") " pod="openshift-marketplace/redhat-operators-9xnqv" Jan 26 00:14:16 crc kubenswrapper[4975]: I0126 00:14:16.039968 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-kvfbr" Jan 26 00:14:16 crc kubenswrapper[4975]: I0126 00:14:16.130163 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9xnqv" Jan 26 00:14:16 crc kubenswrapper[4975]: I0126 00:14:16.179276 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="034b4c5e-88c6-4dd7-a67d-bd9c23a75192" path="/var/lib/kubelet/pods/034b4c5e-88c6-4dd7-a67d-bd9c23a75192/volumes" Jan 26 00:14:16 crc kubenswrapper[4975]: I0126 00:14:16.180037 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27ed45eb-a90d-4bd5-8a17-8988f53407aa" path="/var/lib/kubelet/pods/27ed45eb-a90d-4bd5-8a17-8988f53407aa/volumes" Jan 26 00:14:16 crc kubenswrapper[4975]: I0126 00:14:16.180986 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75c99291-b46b-4f76-9922-cd530cca51c9" path="/var/lib/kubelet/pods/75c99291-b46b-4f76-9922-cd530cca51c9/volumes" Jan 26 00:14:16 crc kubenswrapper[4975]: I0126 00:14:16.182488 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="927dc4e6-f4c4-497b-92d6-3218ab0794ac" path="/var/lib/kubelet/pods/927dc4e6-f4c4-497b-92d6-3218ab0794ac/volumes" Jan 26 00:14:16 crc kubenswrapper[4975]: I0126 00:14:16.183256 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcf19955-9a00-4a50-8ce1-bd7098c45eec" path="/var/lib/kubelet/pods/fcf19955-9a00-4a50-8ce1-bd7098c45eec/volumes" Jan 26 00:14:16 crc kubenswrapper[4975]: I0126 00:14:16.358860 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dfpjb"] Jan 26 00:14:16 crc kubenswrapper[4975]: W0126 00:14:16.363449 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod69d5ae7e_3538_4c88_a51c_da93562ba9f8.slice/crio-5106853f9f2c0843857a1217689f2c316c8587294712c1bb366bd9c78057d2c1 WatchSource:0}: Error finding container 5106853f9f2c0843857a1217689f2c316c8587294712c1bb366bd9c78057d2c1: Status 404 returned error can't find the container with id 5106853f9f2c0843857a1217689f2c316c8587294712c1bb366bd9c78057d2c1 Jan 26 00:14:16 crc kubenswrapper[4975]: I0126 00:14:16.557945 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9xnqv"] Jan 26 00:14:16 crc kubenswrapper[4975]: W0126 00:14:16.611948 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod148781fe_d5ca_4956_822b_0bf9b8ba18d2.slice/crio-aa1140dee4354f9b5da58dbdf7aba468498a63587898671067d3c8cf89391b04 WatchSource:0}: Error finding container aa1140dee4354f9b5da58dbdf7aba468498a63587898671067d3c8cf89391b04: Status 404 returned error can't find the container with id aa1140dee4354f9b5da58dbdf7aba468498a63587898671067d3c8cf89391b04 Jan 26 00:14:17 crc kubenswrapper[4975]: I0126 00:14:17.044662 4975 generic.go:334] "Generic (PLEG): container finished" podID="148781fe-d5ca-4956-822b-0bf9b8ba18d2" containerID="80073147cc14506f6c97026a4cebad7d6a47e8c8576816bab928be62ee5f47f3" exitCode=0 Jan 26 00:14:17 crc kubenswrapper[4975]: I0126 00:14:17.044770 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9xnqv" event={"ID":"148781fe-d5ca-4956-822b-0bf9b8ba18d2","Type":"ContainerDied","Data":"80073147cc14506f6c97026a4cebad7d6a47e8c8576816bab928be62ee5f47f3"} Jan 26 00:14:17 crc kubenswrapper[4975]: I0126 00:14:17.044810 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9xnqv" 
event={"ID":"148781fe-d5ca-4956-822b-0bf9b8ba18d2","Type":"ContainerStarted","Data":"aa1140dee4354f9b5da58dbdf7aba468498a63587898671067d3c8cf89391b04"} Jan 26 00:14:17 crc kubenswrapper[4975]: I0126 00:14:17.048114 4975 generic.go:334] "Generic (PLEG): container finished" podID="69d5ae7e-3538-4c88-a51c-da93562ba9f8" containerID="378844eb08cc47a4a720edfbe6f8e6c2574adff090f66e34b27b4cba5a216bc5" exitCode=0 Jan 26 00:14:17 crc kubenswrapper[4975]: I0126 00:14:17.048261 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dfpjb" event={"ID":"69d5ae7e-3538-4c88-a51c-da93562ba9f8","Type":"ContainerDied","Data":"378844eb08cc47a4a720edfbe6f8e6c2574adff090f66e34b27b4cba5a216bc5"} Jan 26 00:14:17 crc kubenswrapper[4975]: I0126 00:14:17.048334 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dfpjb" event={"ID":"69d5ae7e-3538-4c88-a51c-da93562ba9f8","Type":"ContainerStarted","Data":"5106853f9f2c0843857a1217689f2c316c8587294712c1bb366bd9c78057d2c1"} Jan 26 00:14:17 crc kubenswrapper[4975]: I0126 00:14:17.950328 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jj6v7"] Jan 26 00:14:17 crc kubenswrapper[4975]: I0126 00:14:17.951777 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jj6v7" Jan 26 00:14:17 crc kubenswrapper[4975]: I0126 00:14:17.954555 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 26 00:14:17 crc kubenswrapper[4975]: I0126 00:14:17.970967 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jj6v7"] Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.062340 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqsk7\" (UniqueName: \"kubernetes.io/projected/2d3abc18-8fc6-4108-8d6e-b9268064a682-kube-api-access-rqsk7\") pod \"certified-operators-jj6v7\" (UID: \"2d3abc18-8fc6-4108-8d6e-b9268064a682\") " pod="openshift-marketplace/certified-operators-jj6v7" Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.062404 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d3abc18-8fc6-4108-8d6e-b9268064a682-utilities\") pod \"certified-operators-jj6v7\" (UID: \"2d3abc18-8fc6-4108-8d6e-b9268064a682\") " pod="openshift-marketplace/certified-operators-jj6v7" Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.062686 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d3abc18-8fc6-4108-8d6e-b9268064a682-catalog-content\") pod \"certified-operators-jj6v7\" (UID: \"2d3abc18-8fc6-4108-8d6e-b9268064a682\") " pod="openshift-marketplace/certified-operators-jj6v7" Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.159448 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xxssn"] Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.160615 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xxssn" Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.164641 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.164675 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d3abc18-8fc6-4108-8d6e-b9268064a682-utilities\") pod \"certified-operators-jj6v7\" (UID: \"2d3abc18-8fc6-4108-8d6e-b9268064a682\") " pod="openshift-marketplace/certified-operators-jj6v7" Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.164765 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d3abc18-8fc6-4108-8d6e-b9268064a682-catalog-content\") pod \"certified-operators-jj6v7\" (UID: \"2d3abc18-8fc6-4108-8d6e-b9268064a682\") " pod="openshift-marketplace/certified-operators-jj6v7" Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.164806 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqsk7\" (UniqueName: \"kubernetes.io/projected/2d3abc18-8fc6-4108-8d6e-b9268064a682-kube-api-access-rqsk7\") pod \"certified-operators-jj6v7\" (UID: \"2d3abc18-8fc6-4108-8d6e-b9268064a682\") " pod="openshift-marketplace/certified-operators-jj6v7" Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.165391 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2d3abc18-8fc6-4108-8d6e-b9268064a682-catalog-content\") pod \"certified-operators-jj6v7\" (UID: \"2d3abc18-8fc6-4108-8d6e-b9268064a682\") " pod="openshift-marketplace/certified-operators-jj6v7" Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.166891 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2d3abc18-8fc6-4108-8d6e-b9268064a682-utilities\") pod \"certified-operators-jj6v7\" (UID: \"2d3abc18-8fc6-4108-8d6e-b9268064a682\") " pod="openshift-marketplace/certified-operators-jj6v7" Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.169699 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xxssn"] Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.199416 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqsk7\" (UniqueName: \"kubernetes.io/projected/2d3abc18-8fc6-4108-8d6e-b9268064a682-kube-api-access-rqsk7\") pod \"certified-operators-jj6v7\" (UID: \"2d3abc18-8fc6-4108-8d6e-b9268064a682\") " pod="openshift-marketplace/certified-operators-jj6v7" Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.266097 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b81873b5-3563-4628-9cea-2e3837b4038c-catalog-content\") pod \"community-operators-xxssn\" (UID: \"b81873b5-3563-4628-9cea-2e3837b4038c\") " pod="openshift-marketplace/community-operators-xxssn" Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.266157 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mwsb\" (UniqueName: \"kubernetes.io/projected/b81873b5-3563-4628-9cea-2e3837b4038c-kube-api-access-7mwsb\") pod \"community-operators-xxssn\" (UID: 
\"b81873b5-3563-4628-9cea-2e3837b4038c\") " pod="openshift-marketplace/community-operators-xxssn" Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.266198 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b81873b5-3563-4628-9cea-2e3837b4038c-utilities\") pod \"community-operators-xxssn\" (UID: \"b81873b5-3563-4628-9cea-2e3837b4038c\") " pod="openshift-marketplace/community-operators-xxssn" Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.328176 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jj6v7" Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.367646 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mwsb\" (UniqueName: \"kubernetes.io/projected/b81873b5-3563-4628-9cea-2e3837b4038c-kube-api-access-7mwsb\") pod \"community-operators-xxssn\" (UID: \"b81873b5-3563-4628-9cea-2e3837b4038c\") " pod="openshift-marketplace/community-operators-xxssn" Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.367818 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b81873b5-3563-4628-9cea-2e3837b4038c-utilities\") pod \"community-operators-xxssn\" (UID: \"b81873b5-3563-4628-9cea-2e3837b4038c\") " pod="openshift-marketplace/community-operators-xxssn" Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.368042 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b81873b5-3563-4628-9cea-2e3837b4038c-catalog-content\") pod \"community-operators-xxssn\" (UID: \"b81873b5-3563-4628-9cea-2e3837b4038c\") " pod="openshift-marketplace/community-operators-xxssn" Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.368520 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b81873b5-3563-4628-9cea-2e3837b4038c-catalog-content\") pod \"community-operators-xxssn\" (UID: \"b81873b5-3563-4628-9cea-2e3837b4038c\") " pod="openshift-marketplace/community-operators-xxssn" Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.368710 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b81873b5-3563-4628-9cea-2e3837b4038c-utilities\") pod \"community-operators-xxssn\" (UID: \"b81873b5-3563-4628-9cea-2e3837b4038c\") " pod="openshift-marketplace/community-operators-xxssn" Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.401182 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mwsb\" (UniqueName: \"kubernetes.io/projected/b81873b5-3563-4628-9cea-2e3837b4038c-kube-api-access-7mwsb\") pod \"community-operators-xxssn\" (UID: \"b81873b5-3563-4628-9cea-2e3837b4038c\") " pod="openshift-marketplace/community-operators-xxssn" Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.477239 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xxssn" Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.666210 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jj6v7"] Jan 26 00:14:18 crc kubenswrapper[4975]: I0126 00:14:18.726859 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xxssn"] Jan 26 00:14:18 crc kubenswrapper[4975]: W0126 00:14:18.727771 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb81873b5_3563_4628_9cea_2e3837b4038c.slice/crio-a831d6827f9d9432d553175eead45a4469a9a423b56c5182985138bbac16fd8e WatchSource:0}: Error finding container a831d6827f9d9432d553175eead45a4469a9a423b56c5182985138bbac16fd8e: Status 404 returned error can't find the container with id a831d6827f9d9432d553175eead45a4469a9a423b56c5182985138bbac16fd8e Jan 26 00:14:19 crc kubenswrapper[4975]: I0126 00:14:19.060706 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dfpjb" event={"ID":"69d5ae7e-3538-4c88-a51c-da93562ba9f8","Type":"ContainerStarted","Data":"57e91198d62ea27afa0af8d2d0048bd0643ad5c494ad4ed6f3f0ddd78b89e5e9"} Jan 26 00:14:19 crc kubenswrapper[4975]: I0126 00:14:19.064971 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9xnqv" event={"ID":"148781fe-d5ca-4956-822b-0bf9b8ba18d2","Type":"ContainerStarted","Data":"190b02564ef7347d4d61a21637bdaecdd03aaf906517fc3313cca68f08ba817e"} Jan 26 00:14:19 crc kubenswrapper[4975]: I0126 00:14:19.091202 4975 generic.go:334] "Generic (PLEG): container finished" podID="b81873b5-3563-4628-9cea-2e3837b4038c" containerID="638bce0be8bd6a250e094e08c1d5acf4257079738cdb1c81c3839f2435e7a4ba" exitCode=0 Jan 26 00:14:19 crc kubenswrapper[4975]: I0126 00:14:19.091313 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xxssn" event={"ID":"b81873b5-3563-4628-9cea-2e3837b4038c","Type":"ContainerDied","Data":"638bce0be8bd6a250e094e08c1d5acf4257079738cdb1c81c3839f2435e7a4ba"} Jan 26 00:14:19 crc kubenswrapper[4975]: I0126 00:14:19.091343 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xxssn" event={"ID":"b81873b5-3563-4628-9cea-2e3837b4038c","Type":"ContainerStarted","Data":"a831d6827f9d9432d553175eead45a4469a9a423b56c5182985138bbac16fd8e"} Jan 26 00:14:19 crc kubenswrapper[4975]: I0126 00:14:19.100056 4975 generic.go:334] "Generic (PLEG): container finished" podID="2d3abc18-8fc6-4108-8d6e-b9268064a682" containerID="5767378902da46607cbbeabe217828cba0e306d61d97f76d6d350497f23dd379" exitCode=0 Jan 26 00:14:19 crc kubenswrapper[4975]: I0126 00:14:19.100107 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jj6v7" event={"ID":"2d3abc18-8fc6-4108-8d6e-b9268064a682","Type":"ContainerDied","Data":"5767378902da46607cbbeabe217828cba0e306d61d97f76d6d350497f23dd379"} Jan 26 00:14:19 crc kubenswrapper[4975]: I0126 00:14:19.100134 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jj6v7" event={"ID":"2d3abc18-8fc6-4108-8d6e-b9268064a682","Type":"ContainerStarted","Data":"3a3f79fa134f963b8fef2c539c896c98682ddae2f68bcefebbc6c1102f2584c5"} Jan 26 00:14:20 crc kubenswrapper[4975]: I0126 00:14:20.116840 4975 generic.go:334] "Generic (PLEG): container finished" 
podID="148781fe-d5ca-4956-822b-0bf9b8ba18d2" containerID="190b02564ef7347d4d61a21637bdaecdd03aaf906517fc3313cca68f08ba817e" exitCode=0 Jan 26 00:14:20 crc kubenswrapper[4975]: I0126 00:14:20.117438 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9xnqv" event={"ID":"148781fe-d5ca-4956-822b-0bf9b8ba18d2","Type":"ContainerDied","Data":"190b02564ef7347d4d61a21637bdaecdd03aaf906517fc3313cca68f08ba817e"} Jan 26 00:14:20 crc kubenswrapper[4975]: I0126 00:14:20.120896 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xxssn" event={"ID":"b81873b5-3563-4628-9cea-2e3837b4038c","Type":"ContainerStarted","Data":"07f7747ff68005da3d2a5e3e016d32efa0c7879e2eaf301f74e8a5abe818effb"} Jan 26 00:14:20 crc kubenswrapper[4975]: I0126 00:14:20.124706 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jj6v7" event={"ID":"2d3abc18-8fc6-4108-8d6e-b9268064a682","Type":"ContainerStarted","Data":"1f11978ce2943a928304418204794783e1699ab3d931d0be59c1f05a1cc7b8f8"} Jan 26 00:14:20 crc kubenswrapper[4975]: I0126 00:14:20.127528 4975 generic.go:334] "Generic (PLEG): container finished" podID="69d5ae7e-3538-4c88-a51c-da93562ba9f8" containerID="57e91198d62ea27afa0af8d2d0048bd0643ad5c494ad4ed6f3f0ddd78b89e5e9" exitCode=0 Jan 26 00:14:20 crc kubenswrapper[4975]: I0126 00:14:20.127570 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dfpjb" event={"ID":"69d5ae7e-3538-4c88-a51c-da93562ba9f8","Type":"ContainerDied","Data":"57e91198d62ea27afa0af8d2d0048bd0643ad5c494ad4ed6f3f0ddd78b89e5e9"} Jan 26 00:14:21 crc kubenswrapper[4975]: I0126 00:14:21.136033 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9xnqv" event={"ID":"148781fe-d5ca-4956-822b-0bf9b8ba18d2","Type":"ContainerStarted","Data":"9f52e50719470684686351415d627fe71cb88a82a15ebe808cb91dc94d19338c"} Jan 26 00:14:21 crc kubenswrapper[4975]: I0126 00:14:21.141209 4975 generic.go:334] "Generic (PLEG): container finished" podID="b81873b5-3563-4628-9cea-2e3837b4038c" containerID="07f7747ff68005da3d2a5e3e016d32efa0c7879e2eaf301f74e8a5abe818effb" exitCode=0 Jan 26 00:14:21 crc kubenswrapper[4975]: I0126 00:14:21.141293 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xxssn" event={"ID":"b81873b5-3563-4628-9cea-2e3837b4038c","Type":"ContainerDied","Data":"07f7747ff68005da3d2a5e3e016d32efa0c7879e2eaf301f74e8a5abe818effb"} Jan 26 00:14:21 crc kubenswrapper[4975]: I0126 00:14:21.145068 4975 generic.go:334] "Generic (PLEG): container finished" podID="2d3abc18-8fc6-4108-8d6e-b9268064a682" containerID="1f11978ce2943a928304418204794783e1699ab3d931d0be59c1f05a1cc7b8f8" exitCode=0 Jan 26 00:14:21 crc kubenswrapper[4975]: I0126 00:14:21.145184 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jj6v7" event={"ID":"2d3abc18-8fc6-4108-8d6e-b9268064a682","Type":"ContainerDied","Data":"1f11978ce2943a928304418204794783e1699ab3d931d0be59c1f05a1cc7b8f8"} Jan 26 00:14:21 crc kubenswrapper[4975]: I0126 00:14:21.151089 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dfpjb" event={"ID":"69d5ae7e-3538-4c88-a51c-da93562ba9f8","Type":"ContainerStarted","Data":"4478285381b30f4af9120ce0b8a537d1ae707cca0c1c83d34f9cca80bdeb2933"} Jan 26 00:14:21 crc kubenswrapper[4975]: I0126 00:14:21.173120 4975 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9xnqv" podStartSLOduration=2.557413157 podStartE2EDuration="6.17309899s" podCreationTimestamp="2026-01-26 00:14:15 +0000 UTC" firstStartedPulling="2026-01-26 00:14:17.047209976 +0000 UTC m=+441.168415470" lastFinishedPulling="2026-01-26 00:14:20.662895769 +0000 UTC m=+444.784101303" observedRunningTime="2026-01-26 00:14:21.169927391 +0000 UTC m=+445.291132885" watchObservedRunningTime="2026-01-26 00:14:21.17309899 +0000 UTC m=+445.294304504" Jan 26 00:14:21 crc kubenswrapper[4975]: I0126 00:14:21.221970 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-dfpjb" podStartSLOduration=2.395486861 podStartE2EDuration="6.221949858s" podCreationTimestamp="2026-01-26 00:14:15 +0000 UTC" firstStartedPulling="2026-01-26 00:14:17.058031939 +0000 UTC m=+441.179237443" lastFinishedPulling="2026-01-26 00:14:20.884494936 +0000 UTC m=+445.005700440" observedRunningTime="2026-01-26 00:14:21.211434723 +0000 UTC m=+445.332640237" watchObservedRunningTime="2026-01-26 00:14:21.221949858 +0000 UTC m=+445.343155362" Jan 26 00:14:22 crc kubenswrapper[4975]: I0126 00:14:22.060588 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-jw8lc" Jan 26 00:14:22 crc kubenswrapper[4975]: I0126 00:14:22.164305 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xxssn" event={"ID":"b81873b5-3563-4628-9cea-2e3837b4038c","Type":"ContainerStarted","Data":"2e67068a658876d5cf03f8f17b11863e4572302afc15092fdecfc1f606cd1249"} Jan 26 00:14:22 crc kubenswrapper[4975]: I0126 00:14:22.169714 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jj6v7" event={"ID":"2d3abc18-8fc6-4108-8d6e-b9268064a682","Type":"ContainerStarted","Data":"22a39c4ee26d025a127c5c3a3cf40dd32f438382f479c142bfe2cab1f94326a0"} Jan 26 00:14:22 crc kubenswrapper[4975]: I0126 00:14:22.188891 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vd6qz"] Jan 26 00:14:22 crc kubenswrapper[4975]: I0126 00:14:22.211257 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xxssn" podStartSLOduration=1.699615938 podStartE2EDuration="4.211236157s" podCreationTimestamp="2026-01-26 00:14:18 +0000 UTC" firstStartedPulling="2026-01-26 00:14:19.094476819 +0000 UTC m=+443.215682313" lastFinishedPulling="2026-01-26 00:14:21.606097048 +0000 UTC m=+445.727302532" observedRunningTime="2026-01-26 00:14:22.201614288 +0000 UTC m=+446.322819792" watchObservedRunningTime="2026-01-26 00:14:22.211236157 +0000 UTC m=+446.332441651" Jan 26 00:14:22 crc kubenswrapper[4975]: I0126 00:14:22.242919 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jj6v7" podStartSLOduration=2.5994340620000003 podStartE2EDuration="5.242899404s" podCreationTimestamp="2026-01-26 00:14:17 +0000 UTC" firstStartedPulling="2026-01-26 00:14:19.101516716 +0000 UTC m=+443.222722210" lastFinishedPulling="2026-01-26 00:14:21.744982058 +0000 UTC m=+445.866187552" observedRunningTime="2026-01-26 00:14:22.241587598 +0000 UTC m=+446.362793092" watchObservedRunningTime="2026-01-26 00:14:22.242899404 +0000 UTC m=+446.364104898" Jan 26 00:14:25 crc kubenswrapper[4975]: I0126 00:14:25.896173 4975 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-dfpjb" Jan 26 00:14:25 crc kubenswrapper[4975]: I0126 00:14:25.896657 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-dfpjb" Jan 26 00:14:25 crc kubenswrapper[4975]: I0126 00:14:25.949568 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-dfpjb" Jan 26 00:14:26 crc kubenswrapper[4975]: I0126 00:14:26.130817 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9xnqv" Jan 26 00:14:26 crc kubenswrapper[4975]: I0126 00:14:26.131167 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9xnqv" Jan 26 00:14:26 crc kubenswrapper[4975]: I0126 00:14:26.229165 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-dfpjb" Jan 26 00:14:27 crc kubenswrapper[4975]: I0126 00:14:27.185020 4975 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-9xnqv" podUID="148781fe-d5ca-4956-822b-0bf9b8ba18d2" containerName="registry-server" probeResult="failure" output=< Jan 26 00:14:27 crc kubenswrapper[4975]: timeout: failed to connect service ":50051" within 1s Jan 26 00:14:27 crc kubenswrapper[4975]: > Jan 26 00:14:28 crc kubenswrapper[4975]: I0126 00:14:28.328949 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jj6v7" Jan 26 00:14:28 crc kubenswrapper[4975]: I0126 00:14:28.329031 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jj6v7" Jan 26 00:14:28 crc kubenswrapper[4975]: I0126 00:14:28.388186 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jj6v7" Jan 26 00:14:28 crc kubenswrapper[4975]: I0126 00:14:28.477926 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xxssn" Jan 26 00:14:28 crc kubenswrapper[4975]: I0126 00:14:28.478072 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xxssn" Jan 26 00:14:28 crc kubenswrapper[4975]: I0126 00:14:28.527106 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xxssn" Jan 26 00:14:29 crc kubenswrapper[4975]: I0126 00:14:29.266458 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xxssn" Jan 26 00:14:29 crc kubenswrapper[4975]: I0126 00:14:29.270217 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jj6v7" Jan 26 00:14:36 crc kubenswrapper[4975]: I0126 00:14:36.171916 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9xnqv" Jan 26 00:14:36 crc kubenswrapper[4975]: I0126 00:14:36.208649 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9xnqv" Jan 26 00:14:47 crc kubenswrapper[4975]: I0126 00:14:47.224901 4975 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" podUID="8f4a161d-43b8-44d0-a3a5-2d7f8c4da794" containerName="registry" containerID="cri-o://8212396d902b9d79df60d0a101cb395096b2ec6d613f684276cea4582078481f" gracePeriod=30 Jan 26 00:14:48 crc kubenswrapper[4975]: I0126 00:14:48.911648 4975 patch_prober.go:28] interesting pod/image-registry-697d97f7c8-vd6qz container/registry namespace/openshift-image-registry: Readiness probe status=failure output="Get \"https://10.217.0.27:5000/healthz\": dial tcp 10.217.0.27:5000: connect: connection refused" start-of-body= Jan 26 00:14:48 crc kubenswrapper[4975]: I0126 00:14:48.911793 4975 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" podUID="8f4a161d-43b8-44d0-a3a5-2d7f8c4da794" containerName="registry" probeResult="failure" output="Get \"https://10.217.0.27:5000/healthz\": dial tcp 10.217.0.27:5000: connect: connection refused" Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.641965 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.816206 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-trusted-ca\") pod \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.816592 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-ca-trust-extracted\") pod \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.816656 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-installation-pull-secrets\") pod \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.816699 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-bound-sa-token\") pod \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.816802 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-registry-certificates\") pod \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.816952 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.816985 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fhrs4\" (UniqueName: 
\"kubernetes.io/projected/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-kube-api-access-fhrs4\") pod \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.817013 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-registry-tls\") pod \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\" (UID: \"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794\") " Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.818133 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.818253 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.821941 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-kube-api-access-fhrs4" (OuterVolumeSpecName: "kube-api-access-fhrs4") pod "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794"). InnerVolumeSpecName "kube-api-access-fhrs4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.822115 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.823500 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.824180 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.828165 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.843908 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794" (UID: "8f4a161d-43b8-44d0-a3a5-2d7f8c4da794"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.858193 4975 generic.go:334] "Generic (PLEG): container finished" podID="8f4a161d-43b8-44d0-a3a5-2d7f8c4da794" containerID="8212396d902b9d79df60d0a101cb395096b2ec6d613f684276cea4582078481f" exitCode=0 Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.858271 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" event={"ID":"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794","Type":"ContainerDied","Data":"8212396d902b9d79df60d0a101cb395096b2ec6d613f684276cea4582078481f"} Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.858330 4975 scope.go:117] "RemoveContainer" containerID="8212396d902b9d79df60d0a101cb395096b2ec6d613f684276cea4582078481f" Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.919352 4975 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.919418 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fhrs4\" (UniqueName: \"kubernetes.io/projected/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-kube-api-access-fhrs4\") on node \"crc\" DevicePath \"\"" Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.919430 4975 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.919440 4975 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.919451 4975 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.919461 4975 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 26 00:14:51 crc kubenswrapper[4975]: I0126 00:14:51.919470 4975 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" 
(UniqueName: \"kubernetes.io/projected/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 26 00:14:52 crc kubenswrapper[4975]: I0126 00:14:52.864803 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" event={"ID":"8f4a161d-43b8-44d0-a3a5-2d7f8c4da794","Type":"ContainerDied","Data":"b7c141cf31dee562a2a2e52a24959310abc5214af8ac37ad9b113366e3efc9dd"} Jan 26 00:14:52 crc kubenswrapper[4975]: I0126 00:14:52.864901 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-vd6qz" Jan 26 00:14:52 crc kubenswrapper[4975]: I0126 00:14:52.892057 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vd6qz"] Jan 26 00:14:52 crc kubenswrapper[4975]: I0126 00:14:52.900348 4975 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-vd6qz"] Jan 26 00:14:54 crc kubenswrapper[4975]: I0126 00:14:54.157024 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f4a161d-43b8-44d0-a3a5-2d7f8c4da794" path="/var/lib/kubelet/pods/8f4a161d-43b8-44d0-a3a5-2d7f8c4da794/volumes" Jan 26 00:15:00 crc kubenswrapper[4975]: I0126 00:15:00.180586 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29489775-f99g7"] Jan 26 00:15:00 crc kubenswrapper[4975]: E0126 00:15:00.181682 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f4a161d-43b8-44d0-a3a5-2d7f8c4da794" containerName="registry" Jan 26 00:15:00 crc kubenswrapper[4975]: I0126 00:15:00.181699 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f4a161d-43b8-44d0-a3a5-2d7f8c4da794" containerName="registry" Jan 26 00:15:00 crc kubenswrapper[4975]: I0126 00:15:00.181859 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f4a161d-43b8-44d0-a3a5-2d7f8c4da794" containerName="registry" Jan 26 00:15:00 crc kubenswrapper[4975]: I0126 00:15:00.182481 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29489775-f99g7" Jan 26 00:15:00 crc kubenswrapper[4975]: I0126 00:15:00.185625 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 26 00:15:00 crc kubenswrapper[4975]: I0126 00:15:00.186216 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 26 00:15:00 crc kubenswrapper[4975]: I0126 00:15:00.198628 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29489775-f99g7"] Jan 26 00:15:00 crc kubenswrapper[4975]: I0126 00:15:00.346306 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cggfx\" (UniqueName: \"kubernetes.io/projected/92d6a748-330b-4b5f-a1ea-8ece8bd0d929-kube-api-access-cggfx\") pod \"collect-profiles-29489775-f99g7\" (UID: \"92d6a748-330b-4b5f-a1ea-8ece8bd0d929\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489775-f99g7" Jan 26 00:15:00 crc kubenswrapper[4975]: I0126 00:15:00.347191 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/92d6a748-330b-4b5f-a1ea-8ece8bd0d929-config-volume\") pod \"collect-profiles-29489775-f99g7\" (UID: \"92d6a748-330b-4b5f-a1ea-8ece8bd0d929\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489775-f99g7" Jan 26 00:15:00 crc kubenswrapper[4975]: I0126 00:15:00.347318 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/92d6a748-330b-4b5f-a1ea-8ece8bd0d929-secret-volume\") pod \"collect-profiles-29489775-f99g7\" (UID: \"92d6a748-330b-4b5f-a1ea-8ece8bd0d929\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489775-f99g7" Jan 26 00:15:00 crc kubenswrapper[4975]: I0126 00:15:00.448834 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/92d6a748-330b-4b5f-a1ea-8ece8bd0d929-config-volume\") pod \"collect-profiles-29489775-f99g7\" (UID: \"92d6a748-330b-4b5f-a1ea-8ece8bd0d929\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489775-f99g7" Jan 26 00:15:00 crc kubenswrapper[4975]: I0126 00:15:00.448943 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/92d6a748-330b-4b5f-a1ea-8ece8bd0d929-secret-volume\") pod \"collect-profiles-29489775-f99g7\" (UID: \"92d6a748-330b-4b5f-a1ea-8ece8bd0d929\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489775-f99g7" Jan 26 00:15:00 crc kubenswrapper[4975]: I0126 00:15:00.449115 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cggfx\" (UniqueName: \"kubernetes.io/projected/92d6a748-330b-4b5f-a1ea-8ece8bd0d929-kube-api-access-cggfx\") pod \"collect-profiles-29489775-f99g7\" (UID: \"92d6a748-330b-4b5f-a1ea-8ece8bd0d929\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489775-f99g7" Jan 26 00:15:00 crc kubenswrapper[4975]: I0126 00:15:00.450301 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/92d6a748-330b-4b5f-a1ea-8ece8bd0d929-config-volume\") pod 
\"collect-profiles-29489775-f99g7\" (UID: \"92d6a748-330b-4b5f-a1ea-8ece8bd0d929\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489775-f99g7" Jan 26 00:15:00 crc kubenswrapper[4975]: I0126 00:15:00.457795 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/92d6a748-330b-4b5f-a1ea-8ece8bd0d929-secret-volume\") pod \"collect-profiles-29489775-f99g7\" (UID: \"92d6a748-330b-4b5f-a1ea-8ece8bd0d929\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489775-f99g7" Jan 26 00:15:00 crc kubenswrapper[4975]: I0126 00:15:00.474011 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cggfx\" (UniqueName: \"kubernetes.io/projected/92d6a748-330b-4b5f-a1ea-8ece8bd0d929-kube-api-access-cggfx\") pod \"collect-profiles-29489775-f99g7\" (UID: \"92d6a748-330b-4b5f-a1ea-8ece8bd0d929\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489775-f99g7" Jan 26 00:15:00 crc kubenswrapper[4975]: I0126 00:15:00.508766 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29489775-f99g7" Jan 26 00:15:00 crc kubenswrapper[4975]: I0126 00:15:00.931258 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29489775-f99g7"] Jan 26 00:15:01 crc kubenswrapper[4975]: I0126 00:15:01.937674 4975 generic.go:334] "Generic (PLEG): container finished" podID="92d6a748-330b-4b5f-a1ea-8ece8bd0d929" containerID="07c0de3e467bd2d35bd0228c124cbf03776707b7ca1ba43a70e6d0fe945e8fd4" exitCode=0 Jan 26 00:15:01 crc kubenswrapper[4975]: I0126 00:15:01.937799 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29489775-f99g7" event={"ID":"92d6a748-330b-4b5f-a1ea-8ece8bd0d929","Type":"ContainerDied","Data":"07c0de3e467bd2d35bd0228c124cbf03776707b7ca1ba43a70e6d0fe945e8fd4"} Jan 26 00:15:01 crc kubenswrapper[4975]: I0126 00:15:01.938253 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29489775-f99g7" event={"ID":"92d6a748-330b-4b5f-a1ea-8ece8bd0d929","Type":"ContainerStarted","Data":"161eb86714983a188c638a09e366b185b7227f445910a7ac08e8eca6fc49f979"} Jan 26 00:15:03 crc kubenswrapper[4975]: I0126 00:15:03.189417 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29489775-f99g7" Jan 26 00:15:03 crc kubenswrapper[4975]: I0126 00:15:03.286549 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/92d6a748-330b-4b5f-a1ea-8ece8bd0d929-config-volume\") pod \"92d6a748-330b-4b5f-a1ea-8ece8bd0d929\" (UID: \"92d6a748-330b-4b5f-a1ea-8ece8bd0d929\") " Jan 26 00:15:03 crc kubenswrapper[4975]: I0126 00:15:03.286654 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cggfx\" (UniqueName: \"kubernetes.io/projected/92d6a748-330b-4b5f-a1ea-8ece8bd0d929-kube-api-access-cggfx\") pod \"92d6a748-330b-4b5f-a1ea-8ece8bd0d929\" (UID: \"92d6a748-330b-4b5f-a1ea-8ece8bd0d929\") " Jan 26 00:15:03 crc kubenswrapper[4975]: I0126 00:15:03.286824 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/92d6a748-330b-4b5f-a1ea-8ece8bd0d929-secret-volume\") pod \"92d6a748-330b-4b5f-a1ea-8ece8bd0d929\" (UID: \"92d6a748-330b-4b5f-a1ea-8ece8bd0d929\") " Jan 26 00:15:03 crc kubenswrapper[4975]: I0126 00:15:03.287498 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/92d6a748-330b-4b5f-a1ea-8ece8bd0d929-config-volume" (OuterVolumeSpecName: "config-volume") pod "92d6a748-330b-4b5f-a1ea-8ece8bd0d929" (UID: "92d6a748-330b-4b5f-a1ea-8ece8bd0d929"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:15:03 crc kubenswrapper[4975]: I0126 00:15:03.292834 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92d6a748-330b-4b5f-a1ea-8ece8bd0d929-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "92d6a748-330b-4b5f-a1ea-8ece8bd0d929" (UID: "92d6a748-330b-4b5f-a1ea-8ece8bd0d929"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:15:03 crc kubenswrapper[4975]: I0126 00:15:03.299939 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92d6a748-330b-4b5f-a1ea-8ece8bd0d929-kube-api-access-cggfx" (OuterVolumeSpecName: "kube-api-access-cggfx") pod "92d6a748-330b-4b5f-a1ea-8ece8bd0d929" (UID: "92d6a748-330b-4b5f-a1ea-8ece8bd0d929"). InnerVolumeSpecName "kube-api-access-cggfx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:15:03 crc kubenswrapper[4975]: I0126 00:15:03.389210 4975 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/92d6a748-330b-4b5f-a1ea-8ece8bd0d929-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 26 00:15:03 crc kubenswrapper[4975]: I0126 00:15:03.389308 4975 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/92d6a748-330b-4b5f-a1ea-8ece8bd0d929-config-volume\") on node \"crc\" DevicePath \"\"" Jan 26 00:15:03 crc kubenswrapper[4975]: I0126 00:15:03.389327 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cggfx\" (UniqueName: \"kubernetes.io/projected/92d6a748-330b-4b5f-a1ea-8ece8bd0d929-kube-api-access-cggfx\") on node \"crc\" DevicePath \"\"" Jan 26 00:15:03 crc kubenswrapper[4975]: I0126 00:15:03.951135 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29489775-f99g7" event={"ID":"92d6a748-330b-4b5f-a1ea-8ece8bd0d929","Type":"ContainerDied","Data":"161eb86714983a188c638a09e366b185b7227f445910a7ac08e8eca6fc49f979"} Jan 26 00:15:03 crc kubenswrapper[4975]: I0126 00:15:03.951211 4975 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="161eb86714983a188c638a09e366b185b7227f445910a7ac08e8eca6fc49f979" Jan 26 00:15:03 crc kubenswrapper[4975]: I0126 00:15:03.951221 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29489775-f99g7" Jan 26 00:16:10 crc kubenswrapper[4975]: I0126 00:16:10.481465 4975 patch_prober.go:28] interesting pod/machine-config-daemon-f42fk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 00:16:10 crc kubenswrapper[4975]: I0126 00:16:10.482258 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 00:16:40 crc kubenswrapper[4975]: I0126 00:16:40.481355 4975 patch_prober.go:28] interesting pod/machine-config-daemon-f42fk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 00:16:40 crc kubenswrapper[4975]: I0126 00:16:40.482145 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 00:17:10 crc kubenswrapper[4975]: I0126 00:17:10.481920 4975 patch_prober.go:28] interesting pod/machine-config-daemon-f42fk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 00:17:10 crc kubenswrapper[4975]: I0126 
00:17:10.482568 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 00:17:10 crc kubenswrapper[4975]: I0126 00:17:10.482635 4975 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" Jan 26 00:17:10 crc kubenswrapper[4975]: I0126 00:17:10.483703 4975 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cccf552ba56114d4c5f8e6819d6e68fabbcfcb0d932f9e3a0f3f9004cc92de95"} pod="openshift-machine-config-operator/machine-config-daemon-f42fk" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 26 00:17:10 crc kubenswrapper[4975]: I0126 00:17:10.483837 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" containerID="cri-o://cccf552ba56114d4c5f8e6819d6e68fabbcfcb0d932f9e3a0f3f9004cc92de95" gracePeriod=600 Jan 26 00:17:10 crc kubenswrapper[4975]: I0126 00:17:10.976036 4975 generic.go:334] "Generic (PLEG): container finished" podID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerID="cccf552ba56114d4c5f8e6819d6e68fabbcfcb0d932f9e3a0f3f9004cc92de95" exitCode=0 Jan 26 00:17:10 crc kubenswrapper[4975]: I0126 00:17:10.976088 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" event={"ID":"b76c31fb-14ea-4b49-8a41-0b2731967b86","Type":"ContainerDied","Data":"cccf552ba56114d4c5f8e6819d6e68fabbcfcb0d932f9e3a0f3f9004cc92de95"} Jan 26 00:17:10 crc kubenswrapper[4975]: I0126 00:17:10.976340 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" event={"ID":"b76c31fb-14ea-4b49-8a41-0b2731967b86","Type":"ContainerStarted","Data":"54e8ca5fca0ddd7e187eb19ea2c58ccba0fed975c1e1fa56247a980eb8312698"} Jan 26 00:17:10 crc kubenswrapper[4975]: I0126 00:17:10.976371 4975 scope.go:117] "RemoveContainer" containerID="652e6479f3dfdb1a3ca63514acfe0bfbaff4a6a65fef79d6dec5db4086c1400e" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.199822 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2vrv2"] Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.200828 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="ovn-controller" containerID="cri-o://9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635" gracePeriod=30 Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.200854 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2" gracePeriod=30 Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.200948 4975 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="northd" containerID="cri-o://0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2" gracePeriod=30 Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.200968 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="kube-rbac-proxy-node" containerID="cri-o://7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14" gracePeriod=30 Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.200895 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="nbdb" containerID="cri-o://9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c" gracePeriod=30 Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.201051 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="ovn-acl-logging" containerID="cri-o://69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364" gracePeriod=30 Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.201112 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="sbdb" containerID="cri-o://26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9" gracePeriod=30 Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.234752 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="ovnkube-controller" containerID="cri-o://ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5" gracePeriod=30 Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.549468 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2vrv2_3fd68329-6540-4965-a036-ddd1045f1190/ovnkube-controller/3.log" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.552410 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2vrv2_3fd68329-6540-4965-a036-ddd1045f1190/ovn-acl-logging/0.log" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.553255 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2vrv2_3fd68329-6540-4965-a036-ddd1045f1190/ovn-controller/0.log" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.553694 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.586574 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-etc-openvswitch\") pod \"3fd68329-6540-4965-a036-ddd1045f1190\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.586699 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3fd68329-6540-4965-a036-ddd1045f1190-ovn-node-metrics-cert\") pod \"3fd68329-6540-4965-a036-ddd1045f1190\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.586754 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-cni-netd\") pod \"3fd68329-6540-4965-a036-ddd1045f1190\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.586803 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3fd68329-6540-4965-a036-ddd1045f1190-env-overrides\") pod \"3fd68329-6540-4965-a036-ddd1045f1190\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.586704 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "3fd68329-6540-4965-a036-ddd1045f1190" (UID: "3fd68329-6540-4965-a036-ddd1045f1190"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.586829 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-log-socket\") pod \"3fd68329-6540-4965-a036-ddd1045f1190\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.586801 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "3fd68329-6540-4965-a036-ddd1045f1190" (UID: "3fd68329-6540-4965-a036-ddd1045f1190"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.586849 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-run-openvswitch\") pod \"3fd68329-6540-4965-a036-ddd1045f1190\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.586874 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-var-lib-openvswitch\") pod \"3fd68329-6540-4965-a036-ddd1045f1190\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.586910 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-node-log\") pod \"3fd68329-6540-4965-a036-ddd1045f1190\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.586938 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rmrkv\" (UniqueName: \"kubernetes.io/projected/3fd68329-6540-4965-a036-ddd1045f1190-kube-api-access-rmrkv\") pod \"3fd68329-6540-4965-a036-ddd1045f1190\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.586960 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-run-systemd\") pod \"3fd68329-6540-4965-a036-ddd1045f1190\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.586982 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-var-lib-cni-networks-ovn-kubernetes\") pod \"3fd68329-6540-4965-a036-ddd1045f1190\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587011 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3fd68329-6540-4965-a036-ddd1045f1190-ovnkube-script-lib\") pod \"3fd68329-6540-4965-a036-ddd1045f1190\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587031 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-kubelet\") pod \"3fd68329-6540-4965-a036-ddd1045f1190\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587053 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-slash\") pod \"3fd68329-6540-4965-a036-ddd1045f1190\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587080 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-run-ovn-kubernetes\") pod \"3fd68329-6540-4965-a036-ddd1045f1190\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587093 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-log-socket" (OuterVolumeSpecName: "log-socket") pod "3fd68329-6540-4965-a036-ddd1045f1190" (UID: "3fd68329-6540-4965-a036-ddd1045f1190"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587145 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "3fd68329-6540-4965-a036-ddd1045f1190" (UID: "3fd68329-6540-4965-a036-ddd1045f1190"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587169 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "3fd68329-6540-4965-a036-ddd1045f1190" (UID: "3fd68329-6540-4965-a036-ddd1045f1190"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587116 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-run-ovn\") pod \"3fd68329-6540-4965-a036-ddd1045f1190\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587234 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-systemd-units\") pod \"3fd68329-6540-4965-a036-ddd1045f1190\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587254 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "3fd68329-6540-4965-a036-ddd1045f1190" (UID: "3fd68329-6540-4965-a036-ddd1045f1190"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587273 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3fd68329-6540-4965-a036-ddd1045f1190-ovnkube-config\") pod \"3fd68329-6540-4965-a036-ddd1045f1190\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587296 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-cni-bin\") pod \"3fd68329-6540-4965-a036-ddd1045f1190\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587335 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-run-netns\") pod \"3fd68329-6540-4965-a036-ddd1045f1190\" (UID: \"3fd68329-6540-4965-a036-ddd1045f1190\") " Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587293 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "3fd68329-6540-4965-a036-ddd1045f1190" (UID: "3fd68329-6540-4965-a036-ddd1045f1190"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587544 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-node-log" (OuterVolumeSpecName: "node-log") pod "3fd68329-6540-4965-a036-ddd1045f1190" (UID: "3fd68329-6540-4965-a036-ddd1045f1190"). InnerVolumeSpecName "node-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587825 4975 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587898 4975 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-cni-netd\") on node \"crc\" DevicePath \"\"" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587943 4975 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-log-socket\") on node \"crc\" DevicePath \"\"" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587960 4975 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-run-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587975 4975 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587988 4975 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-node-log\") on node \"crc\" DevicePath \"\"" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.588032 4975 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.588049 4975 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587894 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3fd68329-6540-4965-a036-ddd1045f1190-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "3fd68329-6540-4965-a036-ddd1045f1190" (UID: "3fd68329-6540-4965-a036-ddd1045f1190"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587940 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3fd68329-6540-4965-a036-ddd1045f1190-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "3fd68329-6540-4965-a036-ddd1045f1190" (UID: "3fd68329-6540-4965-a036-ddd1045f1190"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587939 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "3fd68329-6540-4965-a036-ddd1045f1190" (UID: "3fd68329-6540-4965-a036-ddd1045f1190"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587972 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "3fd68329-6540-4965-a036-ddd1045f1190" (UID: "3fd68329-6540-4965-a036-ddd1045f1190"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.587987 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-slash" (OuterVolumeSpecName: "host-slash") pod "3fd68329-6540-4965-a036-ddd1045f1190" (UID: "3fd68329-6540-4965-a036-ddd1045f1190"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.588003 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "3fd68329-6540-4965-a036-ddd1045f1190" (UID: "3fd68329-6540-4965-a036-ddd1045f1190"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.588207 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "3fd68329-6540-4965-a036-ddd1045f1190" (UID: "3fd68329-6540-4965-a036-ddd1045f1190"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.588243 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "3fd68329-6540-4965-a036-ddd1045f1190" (UID: "3fd68329-6540-4965-a036-ddd1045f1190"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.588306 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3fd68329-6540-4965-a036-ddd1045f1190-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "3fd68329-6540-4965-a036-ddd1045f1190" (UID: "3fd68329-6540-4965-a036-ddd1045f1190"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.594336 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fd68329-6540-4965-a036-ddd1045f1190-kube-api-access-rmrkv" (OuterVolumeSpecName: "kube-api-access-rmrkv") pod "3fd68329-6540-4965-a036-ddd1045f1190" (UID: "3fd68329-6540-4965-a036-ddd1045f1190"). InnerVolumeSpecName "kube-api-access-rmrkv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.597220 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3fd68329-6540-4965-a036-ddd1045f1190-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "3fd68329-6540-4965-a036-ddd1045f1190" (UID: "3fd68329-6540-4965-a036-ddd1045f1190"). 
InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.603167 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "3fd68329-6540-4965-a036-ddd1045f1190" (UID: "3fd68329-6540-4965-a036-ddd1045f1190"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.607945 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-zgpjs"] Jan 26 00:19:01 crc kubenswrapper[4975]: E0126 00:19:01.608185 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="ovnkube-controller" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608201 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="ovnkube-controller" Jan 26 00:19:01 crc kubenswrapper[4975]: E0126 00:19:01.608211 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="ovnkube-controller" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608217 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="ovnkube-controller" Jan 26 00:19:01 crc kubenswrapper[4975]: E0126 00:19:01.608227 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="kube-rbac-proxy-ovn-metrics" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608234 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="kube-rbac-proxy-ovn-metrics" Jan 26 00:19:01 crc kubenswrapper[4975]: E0126 00:19:01.608243 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="nbdb" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608248 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="nbdb" Jan 26 00:19:01 crc kubenswrapper[4975]: E0126 00:19:01.608256 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="sbdb" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608264 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="sbdb" Jan 26 00:19:01 crc kubenswrapper[4975]: E0126 00:19:01.608273 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="ovnkube-controller" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608278 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="ovnkube-controller" Jan 26 00:19:01 crc kubenswrapper[4975]: E0126 00:19:01.608286 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92d6a748-330b-4b5f-a1ea-8ece8bd0d929" containerName="collect-profiles" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608291 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="92d6a748-330b-4b5f-a1ea-8ece8bd0d929" containerName="collect-profiles" Jan 26 00:19:01 crc kubenswrapper[4975]: E0126 00:19:01.608301 4975 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="northd" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608307 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="northd" Jan 26 00:19:01 crc kubenswrapper[4975]: E0126 00:19:01.608315 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="ovnkube-controller" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608320 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="ovnkube-controller" Jan 26 00:19:01 crc kubenswrapper[4975]: E0126 00:19:01.608329 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="kubecfg-setup" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608334 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="kubecfg-setup" Jan 26 00:19:01 crc kubenswrapper[4975]: E0126 00:19:01.608343 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="kube-rbac-proxy-node" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608349 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="kube-rbac-proxy-node" Jan 26 00:19:01 crc kubenswrapper[4975]: E0126 00:19:01.608358 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="ovn-controller" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608363 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="ovn-controller" Jan 26 00:19:01 crc kubenswrapper[4975]: E0126 00:19:01.608370 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="ovn-acl-logging" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608376 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="ovn-acl-logging" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608467 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="ovnkube-controller" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608476 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="sbdb" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608485 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="northd" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608496 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="ovn-acl-logging" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608504 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="kube-rbac-proxy-node" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608514 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="nbdb" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 
00:19:01.608523 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="92d6a748-330b-4b5f-a1ea-8ece8bd0d929" containerName="collect-profiles" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608531 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="kube-rbac-proxy-ovn-metrics" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608539 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="ovnkube-controller" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608546 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="ovnkube-controller" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608554 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="ovn-controller" Jan 26 00:19:01 crc kubenswrapper[4975]: E0126 00:19:01.608644 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="ovnkube-controller" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608652 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="ovnkube-controller" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608761 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="ovnkube-controller" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.608775 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fd68329-6540-4965-a036-ddd1045f1190" containerName="ovnkube-controller" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.610448 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.688538 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-host-cni-netd\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.688609 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-run-ovn\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.688725 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-log-socket\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.688884 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-host-cni-bin\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.688957 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.689006 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-systemd-units\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.689057 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-host-kubelet\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.689122 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-run-systemd\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.689881 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: 
\"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-host-run-netns\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.690055 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-node-log\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.690188 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-694s7\" (UniqueName: \"kubernetes.io/projected/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-kube-api-access-694s7\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.690254 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-host-run-ovn-kubernetes\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.690301 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-run-openvswitch\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.690334 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-host-slash\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.690365 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-ovn-node-metrics-cert\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.690403 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-env-overrides\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.690458 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-var-lib-openvswitch\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.690496 4975 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-ovnkube-config\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.690518 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-ovnkube-script-lib\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.690555 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-etc-openvswitch\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.690719 4975 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-systemd-units\") on node \"crc\" DevicePath \"\"" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.690746 4975 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/3fd68329-6540-4965-a036-ddd1045f1190-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.690757 4975 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-cni-bin\") on node \"crc\" DevicePath \"\"" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.690775 4975 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-run-netns\") on node \"crc\" DevicePath \"\"" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.690787 4975 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/3fd68329-6540-4965-a036-ddd1045f1190-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.690802 4975 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/3fd68329-6540-4965-a036-ddd1045f1190-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.690813 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rmrkv\" (UniqueName: \"kubernetes.io/projected/3fd68329-6540-4965-a036-ddd1045f1190-kube-api-access-rmrkv\") on node \"crc\" DevicePath \"\"" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.690824 4975 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-run-systemd\") on node \"crc\" DevicePath \"\"" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.690834 4975 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/3fd68329-6540-4965-a036-ddd1045f1190-ovnkube-script-lib\") on 
node \"crc\" DevicePath \"\"" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.690847 4975 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-kubelet\") on node \"crc\" DevicePath \"\"" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.690857 4975 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-slash\") on node \"crc\" DevicePath \"\"" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.690871 4975 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3fd68329-6540-4965-a036-ddd1045f1190-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.705879 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bcsb4_7d3cba21-428c-4151-bb16-f3478d54c90e/kube-multus/2.log" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.706473 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bcsb4_7d3cba21-428c-4151-bb16-f3478d54c90e/kube-multus/1.log" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.706526 4975 generic.go:334] "Generic (PLEG): container finished" podID="7d3cba21-428c-4151-bb16-f3478d54c90e" containerID="291b8b404b3282be2bbfe47022cd28cb25ec61846c02573816da8fbe453e002a" exitCode=2 Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.706602 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bcsb4" event={"ID":"7d3cba21-428c-4151-bb16-f3478d54c90e","Type":"ContainerDied","Data":"291b8b404b3282be2bbfe47022cd28cb25ec61846c02573816da8fbe453e002a"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.706659 4975 scope.go:117] "RemoveContainer" containerID="c6cc951f021693f452e15865d8fed4a5318e88a5a4c778f8d9dbf6464fde061b" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.707481 4975 scope.go:117] "RemoveContainer" containerID="291b8b404b3282be2bbfe47022cd28cb25ec61846c02573816da8fbe453e002a" Jan 26 00:19:01 crc kubenswrapper[4975]: E0126 00:19:01.708188 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-bcsb4_openshift-multus(7d3cba21-428c-4151-bb16-f3478d54c90e)\"" pod="openshift-multus/multus-bcsb4" podUID="7d3cba21-428c-4151-bb16-f3478d54c90e" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.709441 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2vrv2_3fd68329-6540-4965-a036-ddd1045f1190/ovnkube-controller/3.log" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.713086 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2vrv2_3fd68329-6540-4965-a036-ddd1045f1190/ovn-acl-logging/0.log" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714004 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2vrv2_3fd68329-6540-4965-a036-ddd1045f1190/ovn-controller/0.log" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714455 4975 generic.go:334] "Generic (PLEG): container finished" podID="3fd68329-6540-4965-a036-ddd1045f1190" containerID="ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5" exitCode=0 Jan 26 
00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714486 4975 generic.go:334] "Generic (PLEG): container finished" podID="3fd68329-6540-4965-a036-ddd1045f1190" containerID="26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9" exitCode=0 Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714496 4975 generic.go:334] "Generic (PLEG): container finished" podID="3fd68329-6540-4965-a036-ddd1045f1190" containerID="9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c" exitCode=0 Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714507 4975 generic.go:334] "Generic (PLEG): container finished" podID="3fd68329-6540-4965-a036-ddd1045f1190" containerID="0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2" exitCode=0 Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714517 4975 generic.go:334] "Generic (PLEG): container finished" podID="3fd68329-6540-4965-a036-ddd1045f1190" containerID="d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2" exitCode=0 Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714508 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerDied","Data":"ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714581 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerDied","Data":"26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714600 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerDied","Data":"9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714614 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerDied","Data":"0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714627 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerDied","Data":"d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714642 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerDied","Data":"7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714641 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714661 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714681 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714529 4975 generic.go:334] "Generic (PLEG): container finished" podID="3fd68329-6540-4965-a036-ddd1045f1190" containerID="7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14" exitCode=0 Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714719 4975 generic.go:334] "Generic (PLEG): container finished" podID="3fd68329-6540-4965-a036-ddd1045f1190" containerID="69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364" exitCode=143 Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714772 4975 generic.go:334] "Generic (PLEG): container finished" podID="3fd68329-6540-4965-a036-ddd1045f1190" containerID="9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635" exitCode=143 Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714689 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714801 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714810 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714819 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714827 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714834 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714841 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714851 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714865 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" 
event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerDied","Data":"69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714881 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714889 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714896 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714903 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714910 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714916 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714925 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714934 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714947 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714954 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714964 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerDied","Data":"9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714980 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.714992 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.715000 4975 pod_container_deletor.go:114] 
"Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.715008 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.715016 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.715024 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.715032 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.715041 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.715047 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.715055 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.715066 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2vrv2" event={"ID":"3fd68329-6540-4965-a036-ddd1045f1190","Type":"ContainerDied","Data":"6b518071003e1a2cc17fda3b3c14c83f7f20b84c9da657d4f026e09f7ac2e4f1"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.715078 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.715089 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.715098 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.715105 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.715112 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.715119 4975 pod_container_deletor.go:114] 
"Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.715126 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.715134 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.715142 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.715149 4975 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3"} Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.744890 4975 scope.go:117] "RemoveContainer" containerID="ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.766233 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2vrv2"] Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.771855 4975 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2vrv2"] Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.773588 4975 scope.go:117] "RemoveContainer" containerID="61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.791192 4975 scope.go:117] "RemoveContainer" containerID="26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.791439 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-host-cni-bin\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.791485 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.791517 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-systemd-units\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.791548 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-host-kubelet\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.791573 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.791577 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-host-run-netns\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.791605 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-host-run-netns\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.791576 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-host-cni-bin\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.791633 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-run-systemd\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.791671 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-systemd-units\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.791671 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-host-kubelet\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.791685 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-694s7\" (UniqueName: \"kubernetes.io/projected/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-kube-api-access-694s7\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.791722 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-run-systemd\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 
00:19:01.791792 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-node-log\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.791843 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-node-log\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.791848 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-host-run-ovn-kubernetes\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.791892 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-host-run-ovn-kubernetes\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.791907 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-run-openvswitch\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.791932 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-host-slash\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.791982 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-ovn-node-metrics-cert\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.792017 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-env-overrides\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.792027 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-run-openvswitch\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.792060 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" 
(UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-host-slash\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.792091 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-var-lib-openvswitch\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.792069 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-var-lib-openvswitch\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.792140 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-ovnkube-config\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.792214 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-ovnkube-script-lib\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.792271 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-etc-openvswitch\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.792430 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-host-cni-netd\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.792489 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-run-ovn\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.792535 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-log-socket\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.792627 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-log-socket\") pod \"ovnkube-node-zgpjs\" 
(UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.792991 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-etc-openvswitch\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.793049 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-host-cni-netd\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.793094 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-run-ovn\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.793140 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-ovnkube-config\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.793205 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-env-overrides\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.793281 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-ovnkube-script-lib\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.795096 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-ovn-node-metrics-cert\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.807811 4975 scope.go:117] "RemoveContainer" containerID="9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.815839 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-694s7\" (UniqueName: \"kubernetes.io/projected/a56a8901-4ffa-412f-88fa-5ffed25f7fd2-kube-api-access-694s7\") pod \"ovnkube-node-zgpjs\" (UID: \"a56a8901-4ffa-412f-88fa-5ffed25f7fd2\") " pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.820955 4975 scope.go:117] "RemoveContainer" containerID="0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.835474 4975 
scope.go:117] "RemoveContainer" containerID="d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.850377 4975 scope.go:117] "RemoveContainer" containerID="7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.862768 4975 scope.go:117] "RemoveContainer" containerID="69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.878793 4975 scope.go:117] "RemoveContainer" containerID="9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.893227 4975 scope.go:117] "RemoveContainer" containerID="4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.905667 4975 scope.go:117] "RemoveContainer" containerID="ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5" Jan 26 00:19:01 crc kubenswrapper[4975]: E0126 00:19:01.906116 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5\": container with ID starting with ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5 not found: ID does not exist" containerID="ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.906155 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5"} err="failed to get container status \"ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5\": rpc error: code = NotFound desc = could not find container \"ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5\": container with ID starting with ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.906180 4975 scope.go:117] "RemoveContainer" containerID="61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4" Jan 26 00:19:01 crc kubenswrapper[4975]: E0126 00:19:01.906482 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4\": container with ID starting with 61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4 not found: ID does not exist" containerID="61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.906546 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4"} err="failed to get container status \"61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4\": rpc error: code = NotFound desc = could not find container \"61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4\": container with ID starting with 61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.906607 4975 scope.go:117] "RemoveContainer" containerID="26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9" Jan 26 00:19:01 crc kubenswrapper[4975]: E0126 00:19:01.907419 4975 
log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\": container with ID starting with 26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9 not found: ID does not exist" containerID="26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.907459 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9"} err="failed to get container status \"26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\": rpc error: code = NotFound desc = could not find container \"26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\": container with ID starting with 26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.907475 4975 scope.go:117] "RemoveContainer" containerID="9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c" Jan 26 00:19:01 crc kubenswrapper[4975]: E0126 00:19:01.907843 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\": container with ID starting with 9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c not found: ID does not exist" containerID="9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.907884 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c"} err="failed to get container status \"9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\": rpc error: code = NotFound desc = could not find container \"9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\": container with ID starting with 9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.907917 4975 scope.go:117] "RemoveContainer" containerID="0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2" Jan 26 00:19:01 crc kubenswrapper[4975]: E0126 00:19:01.908214 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\": container with ID starting with 0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2 not found: ID does not exist" containerID="0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.908237 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2"} err="failed to get container status \"0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\": rpc error: code = NotFound desc = could not find container \"0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\": container with ID starting with 0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.908254 4975 scope.go:117] 
"RemoveContainer" containerID="d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2" Jan 26 00:19:01 crc kubenswrapper[4975]: E0126 00:19:01.908493 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\": container with ID starting with d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2 not found: ID does not exist" containerID="d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.908530 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2"} err="failed to get container status \"d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\": rpc error: code = NotFound desc = could not find container \"d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\": container with ID starting with d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.908545 4975 scope.go:117] "RemoveContainer" containerID="7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14" Jan 26 00:19:01 crc kubenswrapper[4975]: E0126 00:19:01.908843 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\": container with ID starting with 7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14 not found: ID does not exist" containerID="7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.908862 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14"} err="failed to get container status \"7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\": rpc error: code = NotFound desc = could not find container \"7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\": container with ID starting with 7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.908877 4975 scope.go:117] "RemoveContainer" containerID="69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364" Jan 26 00:19:01 crc kubenswrapper[4975]: E0126 00:19:01.909158 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\": container with ID starting with 69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364 not found: ID does not exist" containerID="69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.909184 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364"} err="failed to get container status \"69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\": rpc error: code = NotFound desc = could not find container \"69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\": container with ID starting with 
69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.909202 4975 scope.go:117] "RemoveContainer" containerID="9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635" Jan 26 00:19:01 crc kubenswrapper[4975]: E0126 00:19:01.909517 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\": container with ID starting with 9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635 not found: ID does not exist" containerID="9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.909570 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635"} err="failed to get container status \"9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\": rpc error: code = NotFound desc = could not find container \"9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\": container with ID starting with 9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.909603 4975 scope.go:117] "RemoveContainer" containerID="4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3" Jan 26 00:19:01 crc kubenswrapper[4975]: E0126 00:19:01.910050 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\": container with ID starting with 4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3 not found: ID does not exist" containerID="4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.910076 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3"} err="failed to get container status \"4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\": rpc error: code = NotFound desc = could not find container \"4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\": container with ID starting with 4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.910094 4975 scope.go:117] "RemoveContainer" containerID="ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.910318 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5"} err="failed to get container status \"ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5\": rpc error: code = NotFound desc = could not find container \"ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5\": container with ID starting with ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.910335 4975 scope.go:117] "RemoveContainer" containerID="61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4" Jan 26 00:19:01 crc 
kubenswrapper[4975]: I0126 00:19:01.910625 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4"} err="failed to get container status \"61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4\": rpc error: code = NotFound desc = could not find container \"61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4\": container with ID starting with 61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.910646 4975 scope.go:117] "RemoveContainer" containerID="26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.911015 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9"} err="failed to get container status \"26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\": rpc error: code = NotFound desc = could not find container \"26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\": container with ID starting with 26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.911038 4975 scope.go:117] "RemoveContainer" containerID="9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.911273 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c"} err="failed to get container status \"9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\": rpc error: code = NotFound desc = could not find container \"9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\": container with ID starting with 9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.911294 4975 scope.go:117] "RemoveContainer" containerID="0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.911514 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2"} err="failed to get container status \"0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\": rpc error: code = NotFound desc = could not find container \"0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\": container with ID starting with 0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.911543 4975 scope.go:117] "RemoveContainer" containerID="d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.911808 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2"} err="failed to get container status \"d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\": rpc error: code = NotFound desc = could not find container \"d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\": container with ID 
starting with d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.911828 4975 scope.go:117] "RemoveContainer" containerID="7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.912061 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14"} err="failed to get container status \"7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\": rpc error: code = NotFound desc = could not find container \"7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\": container with ID starting with 7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.912081 4975 scope.go:117] "RemoveContainer" containerID="69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.912381 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364"} err="failed to get container status \"69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\": rpc error: code = NotFound desc = could not find container \"69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\": container with ID starting with 69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.912427 4975 scope.go:117] "RemoveContainer" containerID="9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.912811 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635"} err="failed to get container status \"9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\": rpc error: code = NotFound desc = could not find container \"9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\": container with ID starting with 9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.912832 4975 scope.go:117] "RemoveContainer" containerID="4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.913056 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3"} err="failed to get container status \"4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\": rpc error: code = NotFound desc = could not find container \"4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\": container with ID starting with 4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.913084 4975 scope.go:117] "RemoveContainer" containerID="ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.913313 4975 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5"} err="failed to get container status \"ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5\": rpc error: code = NotFound desc = could not find container \"ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5\": container with ID starting with ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.913332 4975 scope.go:117] "RemoveContainer" containerID="61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.913596 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4"} err="failed to get container status \"61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4\": rpc error: code = NotFound desc = could not find container \"61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4\": container with ID starting with 61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.913615 4975 scope.go:117] "RemoveContainer" containerID="26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.913967 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9"} err="failed to get container status \"26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\": rpc error: code = NotFound desc = could not find container \"26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\": container with ID starting with 26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.913990 4975 scope.go:117] "RemoveContainer" containerID="9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.914218 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c"} err="failed to get container status \"9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\": rpc error: code = NotFound desc = could not find container \"9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\": container with ID starting with 9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.914235 4975 scope.go:117] "RemoveContainer" containerID="0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.914432 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2"} err="failed to get container status \"0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\": rpc error: code = NotFound desc = could not find container \"0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\": container with ID starting with 0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2 not found: ID does not exist" Jan 
26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.914449 4975 scope.go:117] "RemoveContainer" containerID="d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.914680 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2"} err="failed to get container status \"d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\": rpc error: code = NotFound desc = could not find container \"d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\": container with ID starting with d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.914696 4975 scope.go:117] "RemoveContainer" containerID="7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.914959 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14"} err="failed to get container status \"7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\": rpc error: code = NotFound desc = could not find container \"7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\": container with ID starting with 7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.914979 4975 scope.go:117] "RemoveContainer" containerID="69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.915212 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364"} err="failed to get container status \"69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\": rpc error: code = NotFound desc = could not find container \"69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\": container with ID starting with 69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.915229 4975 scope.go:117] "RemoveContainer" containerID="9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.915431 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635"} err="failed to get container status \"9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\": rpc error: code = NotFound desc = could not find container \"9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\": container with ID starting with 9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.915450 4975 scope.go:117] "RemoveContainer" containerID="4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.915688 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3"} err="failed to get container status 
\"4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\": rpc error: code = NotFound desc = could not find container \"4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\": container with ID starting with 4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.915709 4975 scope.go:117] "RemoveContainer" containerID="ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.915930 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5"} err="failed to get container status \"ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5\": rpc error: code = NotFound desc = could not find container \"ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5\": container with ID starting with ba860e040e7917c58c3503bfad381513226ee3403b3efd39712ceba43d7dd3b5 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.915950 4975 scope.go:117] "RemoveContainer" containerID="61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.916168 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4"} err="failed to get container status \"61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4\": rpc error: code = NotFound desc = could not find container \"61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4\": container with ID starting with 61556d840942a6676aa1278d24987707bd21a3cc1552d7f4e77d0f80d378e4f4 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.916187 4975 scope.go:117] "RemoveContainer" containerID="26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.916933 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9"} err="failed to get container status \"26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\": rpc error: code = NotFound desc = could not find container \"26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9\": container with ID starting with 26676b64a34cb5c342d02a18e4a01d111760e858d4aa9579754a31d329dc5ee9 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.916951 4975 scope.go:117] "RemoveContainer" containerID="9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.917164 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c"} err="failed to get container status \"9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\": rpc error: code = NotFound desc = could not find container \"9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c\": container with ID starting with 9af899724a3b3be895b911bc9fddb3ab301bcd92e60318c46f5a481a02baee3c not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.917192 4975 scope.go:117] "RemoveContainer" 
containerID="0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.917404 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2"} err="failed to get container status \"0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\": rpc error: code = NotFound desc = could not find container \"0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2\": container with ID starting with 0f9a22a92a5b557a7e1856e11b205bd8c538ad92416bb9c021af175d4b5257a2 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.917427 4975 scope.go:117] "RemoveContainer" containerID="d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.917624 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2"} err="failed to get container status \"d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\": rpc error: code = NotFound desc = could not find container \"d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2\": container with ID starting with d98dffe626c93f8a76d17c188e32ecd69849c25851cca8be4f23b57d4a8246b2 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.917647 4975 scope.go:117] "RemoveContainer" containerID="7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.917977 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14"} err="failed to get container status \"7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\": rpc error: code = NotFound desc = could not find container \"7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14\": container with ID starting with 7f3fd1da6c8cce02e04c4feddd41a76781840c923ece75ea70dead73cc30de14 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.917999 4975 scope.go:117] "RemoveContainer" containerID="69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.918313 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364"} err="failed to get container status \"69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\": rpc error: code = NotFound desc = could not find container \"69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364\": container with ID starting with 69e63ce6ff0a998d74b493813859f7523ffb0f3abab175d231287449b7f17364 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.918371 4975 scope.go:117] "RemoveContainer" containerID="9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.918633 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635"} err="failed to get container status \"9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\": rpc error: code = NotFound desc = could not find 
container \"9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635\": container with ID starting with 9d6d8ae3f6ca9691b957112007632d93b47af00d114e19b9a3dd24362373b635 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.918654 4975 scope.go:117] "RemoveContainer" containerID="4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.918937 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3"} err="failed to get container status \"4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\": rpc error: code = NotFound desc = could not find container \"4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3\": container with ID starting with 4265d66f28968e9a418cd20134ef6fa4a91ad0b04b96a51ae49a8ad5a22716d3 not found: ID does not exist" Jan 26 00:19:01 crc kubenswrapper[4975]: I0126 00:19:01.925450 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:02 crc kubenswrapper[4975]: I0126 00:19:02.156604 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fd68329-6540-4965-a036-ddd1045f1190" path="/var/lib/kubelet/pods/3fd68329-6540-4965-a036-ddd1045f1190/volumes" Jan 26 00:19:02 crc kubenswrapper[4975]: I0126 00:19:02.724288 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bcsb4_7d3cba21-428c-4151-bb16-f3478d54c90e/kube-multus/2.log" Jan 26 00:19:02 crc kubenswrapper[4975]: I0126 00:19:02.727710 4975 generic.go:334] "Generic (PLEG): container finished" podID="a56a8901-4ffa-412f-88fa-5ffed25f7fd2" containerID="0ab81765f7451e18a90b6fe81d68eaa32218def4d31fcfc85df5345e715f61ba" exitCode=0 Jan 26 00:19:02 crc kubenswrapper[4975]: I0126 00:19:02.727765 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" event={"ID":"a56a8901-4ffa-412f-88fa-5ffed25f7fd2","Type":"ContainerDied","Data":"0ab81765f7451e18a90b6fe81d68eaa32218def4d31fcfc85df5345e715f61ba"} Jan 26 00:19:02 crc kubenswrapper[4975]: I0126 00:19:02.727804 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" event={"ID":"a56a8901-4ffa-412f-88fa-5ffed25f7fd2","Type":"ContainerStarted","Data":"f136a678fa4ca7ab1f7f38ed353f9b2de2b4a649f742c37dbab12332ac850745"} Jan 26 00:19:03 crc kubenswrapper[4975]: I0126 00:19:03.736390 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" event={"ID":"a56a8901-4ffa-412f-88fa-5ffed25f7fd2","Type":"ContainerStarted","Data":"0c36af6cb55a86b6a49af2466a83ee0bae17ad0fcb5c3349ebacbaec2ea632fd"} Jan 26 00:19:03 crc kubenswrapper[4975]: I0126 00:19:03.736725 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" event={"ID":"a56a8901-4ffa-412f-88fa-5ffed25f7fd2","Type":"ContainerStarted","Data":"5456f5d791ac2f048f891d5277d56b734d3bb750fe8f7070b0453cc9f3af4f9b"} Jan 26 00:19:03 crc kubenswrapper[4975]: I0126 00:19:03.736760 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" event={"ID":"a56a8901-4ffa-412f-88fa-5ffed25f7fd2","Type":"ContainerStarted","Data":"88b823ef5bfc239c4b3f5c4273906f5377cdd437def07cc99d44de87f23a4829"} Jan 26 00:19:03 crc kubenswrapper[4975]: I0126 00:19:03.736770 4975 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" event={"ID":"a56a8901-4ffa-412f-88fa-5ffed25f7fd2","Type":"ContainerStarted","Data":"7df8e45443f48fffaca124f2f58a74f3e20ee60b55df5dc0fd830bda51e9e8aa"} Jan 26 00:19:03 crc kubenswrapper[4975]: I0126 00:19:03.736779 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" event={"ID":"a56a8901-4ffa-412f-88fa-5ffed25f7fd2","Type":"ContainerStarted","Data":"0742d25126dc624b0624e41e8be1691ceced218cf249201d6723fa922948f44b"} Jan 26 00:19:03 crc kubenswrapper[4975]: I0126 00:19:03.736787 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" event={"ID":"a56a8901-4ffa-412f-88fa-5ffed25f7fd2","Type":"ContainerStarted","Data":"1a89229173c67c939d6d2b67cfaeefc1cde08b3420ab80c7ee35506ed0666d5e"} Jan 26 00:19:05 crc kubenswrapper[4975]: I0126 00:19:05.752937 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" event={"ID":"a56a8901-4ffa-412f-88fa-5ffed25f7fd2","Type":"ContainerStarted","Data":"bf707e2ab3464dbbca74db3b6eef493c5f5e15a3c94aefa69c744d426e712bae"} Jan 26 00:19:08 crc kubenswrapper[4975]: I0126 00:19:08.778722 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" event={"ID":"a56a8901-4ffa-412f-88fa-5ffed25f7fd2","Type":"ContainerStarted","Data":"749dde334b088c3cdf3ff409666c46282e950856ea1c5be8fb987e03e338397e"} Jan 26 00:19:08 crc kubenswrapper[4975]: I0126 00:19:08.779565 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:08 crc kubenswrapper[4975]: I0126 00:19:08.779769 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:08 crc kubenswrapper[4975]: I0126 00:19:08.779788 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:08 crc kubenswrapper[4975]: I0126 00:19:08.809608 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:08 crc kubenswrapper[4975]: I0126 00:19:08.810081 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:08 crc kubenswrapper[4975]: I0126 00:19:08.825705 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" podStartSLOduration=7.825687975 podStartE2EDuration="7.825687975s" podCreationTimestamp="2026-01-26 00:19:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:19:08.823036341 +0000 UTC m=+732.944241835" watchObservedRunningTime="2026-01-26 00:19:08.825687975 +0000 UTC m=+732.946893489" Jan 26 00:19:10 crc kubenswrapper[4975]: I0126 00:19:10.481662 4975 patch_prober.go:28] interesting pod/machine-config-daemon-f42fk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 00:19:10 crc kubenswrapper[4975]: I0126 00:19:10.481743 4975 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 00:19:12 crc kubenswrapper[4975]: I0126 00:19:12.147381 4975 scope.go:117] "RemoveContainer" containerID="291b8b404b3282be2bbfe47022cd28cb25ec61846c02573816da8fbe453e002a" Jan 26 00:19:12 crc kubenswrapper[4975]: I0126 00:19:12.803758 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bcsb4_7d3cba21-428c-4151-bb16-f3478d54c90e/kube-multus/2.log" Jan 26 00:19:12 crc kubenswrapper[4975]: I0126 00:19:12.804063 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bcsb4" event={"ID":"7d3cba21-428c-4151-bb16-f3478d54c90e","Type":"ContainerStarted","Data":"6a890e2a30f0a16b4f6dad4d8cd62d874263d80e314010312323e86e76a3df61"} Jan 26 00:19:31 crc kubenswrapper[4975]: I0126 00:19:31.948192 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-zgpjs" Jan 26 00:19:35 crc kubenswrapper[4975]: I0126 00:19:35.337247 4975 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 26 00:19:40 crc kubenswrapper[4975]: I0126 00:19:40.481875 4975 patch_prober.go:28] interesting pod/machine-config-daemon-f42fk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 00:19:40 crc kubenswrapper[4975]: I0126 00:19:40.482611 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 00:19:56 crc kubenswrapper[4975]: I0126 00:19:56.470467 4975 scope.go:117] "RemoveContainer" containerID="61d4d9a40687aad73d659ea89392d91e23d9ecf47eebee42d38e5b061389caf7" Jan 26 00:20:10 crc kubenswrapper[4975]: I0126 00:20:10.481864 4975 patch_prober.go:28] interesting pod/machine-config-daemon-f42fk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 00:20:10 crc kubenswrapper[4975]: I0126 00:20:10.482529 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 00:20:10 crc kubenswrapper[4975]: I0126 00:20:10.482614 4975 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" Jan 26 00:20:10 crc kubenswrapper[4975]: I0126 00:20:10.483543 4975 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"54e8ca5fca0ddd7e187eb19ea2c58ccba0fed975c1e1fa56247a980eb8312698"} 
pod="openshift-machine-config-operator/machine-config-daemon-f42fk" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 26 00:20:10 crc kubenswrapper[4975]: I0126 00:20:10.483671 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" containerID="cri-o://54e8ca5fca0ddd7e187eb19ea2c58ccba0fed975c1e1fa56247a980eb8312698" gracePeriod=600 Jan 26 00:20:11 crc kubenswrapper[4975]: I0126 00:20:11.189077 4975 generic.go:334] "Generic (PLEG): container finished" podID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerID="54e8ca5fca0ddd7e187eb19ea2c58ccba0fed975c1e1fa56247a980eb8312698" exitCode=0 Jan 26 00:20:11 crc kubenswrapper[4975]: I0126 00:20:11.189265 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" event={"ID":"b76c31fb-14ea-4b49-8a41-0b2731967b86","Type":"ContainerDied","Data":"54e8ca5fca0ddd7e187eb19ea2c58ccba0fed975c1e1fa56247a980eb8312698"} Jan 26 00:20:11 crc kubenswrapper[4975]: I0126 00:20:11.189468 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" event={"ID":"b76c31fb-14ea-4b49-8a41-0b2731967b86","Type":"ContainerStarted","Data":"1ff2c84d111b05e92d29b652399f7326ae273accdab83024597003719d8b7515"} Jan 26 00:20:11 crc kubenswrapper[4975]: I0126 00:20:11.189517 4975 scope.go:117] "RemoveContainer" containerID="cccf552ba56114d4c5f8e6819d6e68fabbcfcb0d932f9e3a0f3f9004cc92de95" Jan 26 00:20:27 crc kubenswrapper[4975]: I0126 00:20:27.576226 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dfpjb"] Jan 26 00:20:27 crc kubenswrapper[4975]: I0126 00:20:27.577082 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-dfpjb" podUID="69d5ae7e-3538-4c88-a51c-da93562ba9f8" containerName="registry-server" containerID="cri-o://4478285381b30f4af9120ce0b8a537d1ae707cca0c1c83d34f9cca80bdeb2933" gracePeriod=30 Jan 26 00:20:27 crc kubenswrapper[4975]: I0126 00:20:27.968931 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dfpjb" Jan 26 00:20:28 crc kubenswrapper[4975]: I0126 00:20:28.075471 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69d5ae7e-3538-4c88-a51c-da93562ba9f8-catalog-content\") pod \"69d5ae7e-3538-4c88-a51c-da93562ba9f8\" (UID: \"69d5ae7e-3538-4c88-a51c-da93562ba9f8\") " Jan 26 00:20:28 crc kubenswrapper[4975]: I0126 00:20:28.075804 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xplfl\" (UniqueName: \"kubernetes.io/projected/69d5ae7e-3538-4c88-a51c-da93562ba9f8-kube-api-access-xplfl\") pod \"69d5ae7e-3538-4c88-a51c-da93562ba9f8\" (UID: \"69d5ae7e-3538-4c88-a51c-da93562ba9f8\") " Jan 26 00:20:28 crc kubenswrapper[4975]: I0126 00:20:28.075868 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69d5ae7e-3538-4c88-a51c-da93562ba9f8-utilities\") pod \"69d5ae7e-3538-4c88-a51c-da93562ba9f8\" (UID: \"69d5ae7e-3538-4c88-a51c-da93562ba9f8\") " Jan 26 00:20:28 crc kubenswrapper[4975]: I0126 00:20:28.076971 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69d5ae7e-3538-4c88-a51c-da93562ba9f8-utilities" (OuterVolumeSpecName: "utilities") pod "69d5ae7e-3538-4c88-a51c-da93562ba9f8" (UID: "69d5ae7e-3538-4c88-a51c-da93562ba9f8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:20:28 crc kubenswrapper[4975]: I0126 00:20:28.082807 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69d5ae7e-3538-4c88-a51c-da93562ba9f8-kube-api-access-xplfl" (OuterVolumeSpecName: "kube-api-access-xplfl") pod "69d5ae7e-3538-4c88-a51c-da93562ba9f8" (UID: "69d5ae7e-3538-4c88-a51c-da93562ba9f8"). InnerVolumeSpecName "kube-api-access-xplfl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:20:28 crc kubenswrapper[4975]: I0126 00:20:28.098950 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69d5ae7e-3538-4c88-a51c-da93562ba9f8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "69d5ae7e-3538-4c88-a51c-da93562ba9f8" (UID: "69d5ae7e-3538-4c88-a51c-da93562ba9f8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:20:28 crc kubenswrapper[4975]: I0126 00:20:28.177863 4975 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69d5ae7e-3538-4c88-a51c-da93562ba9f8-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 00:20:28 crc kubenswrapper[4975]: I0126 00:20:28.177900 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xplfl\" (UniqueName: \"kubernetes.io/projected/69d5ae7e-3538-4c88-a51c-da93562ba9f8-kube-api-access-xplfl\") on node \"crc\" DevicePath \"\"" Jan 26 00:20:28 crc kubenswrapper[4975]: I0126 00:20:28.177913 4975 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69d5ae7e-3538-4c88-a51c-da93562ba9f8-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 00:20:28 crc kubenswrapper[4975]: I0126 00:20:28.293413 4975 generic.go:334] "Generic (PLEG): container finished" podID="69d5ae7e-3538-4c88-a51c-da93562ba9f8" containerID="4478285381b30f4af9120ce0b8a537d1ae707cca0c1c83d34f9cca80bdeb2933" exitCode=0 Jan 26 00:20:28 crc kubenswrapper[4975]: I0126 00:20:28.293465 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dfpjb" event={"ID":"69d5ae7e-3538-4c88-a51c-da93562ba9f8","Type":"ContainerDied","Data":"4478285381b30f4af9120ce0b8a537d1ae707cca0c1c83d34f9cca80bdeb2933"} Jan 26 00:20:28 crc kubenswrapper[4975]: I0126 00:20:28.293486 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dfpjb" Jan 26 00:20:28 crc kubenswrapper[4975]: I0126 00:20:28.293511 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dfpjb" event={"ID":"69d5ae7e-3538-4c88-a51c-da93562ba9f8","Type":"ContainerDied","Data":"5106853f9f2c0843857a1217689f2c316c8587294712c1bb366bd9c78057d2c1"} Jan 26 00:20:28 crc kubenswrapper[4975]: I0126 00:20:28.293534 4975 scope.go:117] "RemoveContainer" containerID="4478285381b30f4af9120ce0b8a537d1ae707cca0c1c83d34f9cca80bdeb2933" Jan 26 00:20:28 crc kubenswrapper[4975]: I0126 00:20:28.318405 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dfpjb"] Jan 26 00:20:28 crc kubenswrapper[4975]: I0126 00:20:28.320283 4975 scope.go:117] "RemoveContainer" containerID="57e91198d62ea27afa0af8d2d0048bd0643ad5c494ad4ed6f3f0ddd78b89e5e9" Jan 26 00:20:28 crc kubenswrapper[4975]: I0126 00:20:28.323454 4975 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-dfpjb"] Jan 26 00:20:28 crc kubenswrapper[4975]: I0126 00:20:28.338463 4975 scope.go:117] "RemoveContainer" containerID="378844eb08cc47a4a720edfbe6f8e6c2574adff090f66e34b27b4cba5a216bc5" Jan 26 00:20:28 crc kubenswrapper[4975]: I0126 00:20:28.362960 4975 scope.go:117] "RemoveContainer" containerID="4478285381b30f4af9120ce0b8a537d1ae707cca0c1c83d34f9cca80bdeb2933" Jan 26 00:20:28 crc kubenswrapper[4975]: E0126 00:20:28.363585 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4478285381b30f4af9120ce0b8a537d1ae707cca0c1c83d34f9cca80bdeb2933\": container with ID starting with 4478285381b30f4af9120ce0b8a537d1ae707cca0c1c83d34f9cca80bdeb2933 not found: ID does not exist" containerID="4478285381b30f4af9120ce0b8a537d1ae707cca0c1c83d34f9cca80bdeb2933" Jan 26 00:20:28 crc kubenswrapper[4975]: I0126 00:20:28.363652 4975 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4478285381b30f4af9120ce0b8a537d1ae707cca0c1c83d34f9cca80bdeb2933"} err="failed to get container status \"4478285381b30f4af9120ce0b8a537d1ae707cca0c1c83d34f9cca80bdeb2933\": rpc error: code = NotFound desc = could not find container \"4478285381b30f4af9120ce0b8a537d1ae707cca0c1c83d34f9cca80bdeb2933\": container with ID starting with 4478285381b30f4af9120ce0b8a537d1ae707cca0c1c83d34f9cca80bdeb2933 not found: ID does not exist" Jan 26 00:20:28 crc kubenswrapper[4975]: I0126 00:20:28.363698 4975 scope.go:117] "RemoveContainer" containerID="57e91198d62ea27afa0af8d2d0048bd0643ad5c494ad4ed6f3f0ddd78b89e5e9" Jan 26 00:20:28 crc kubenswrapper[4975]: E0126 00:20:28.364487 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57e91198d62ea27afa0af8d2d0048bd0643ad5c494ad4ed6f3f0ddd78b89e5e9\": container with ID starting with 57e91198d62ea27afa0af8d2d0048bd0643ad5c494ad4ed6f3f0ddd78b89e5e9 not found: ID does not exist" containerID="57e91198d62ea27afa0af8d2d0048bd0643ad5c494ad4ed6f3f0ddd78b89e5e9" Jan 26 00:20:28 crc kubenswrapper[4975]: I0126 00:20:28.364516 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57e91198d62ea27afa0af8d2d0048bd0643ad5c494ad4ed6f3f0ddd78b89e5e9"} err="failed to get container status \"57e91198d62ea27afa0af8d2d0048bd0643ad5c494ad4ed6f3f0ddd78b89e5e9\": rpc error: code = NotFound desc = could not find container \"57e91198d62ea27afa0af8d2d0048bd0643ad5c494ad4ed6f3f0ddd78b89e5e9\": container with ID starting with 57e91198d62ea27afa0af8d2d0048bd0643ad5c494ad4ed6f3f0ddd78b89e5e9 not found: ID does not exist" Jan 26 00:20:28 crc kubenswrapper[4975]: I0126 00:20:28.364541 4975 scope.go:117] "RemoveContainer" containerID="378844eb08cc47a4a720edfbe6f8e6c2574adff090f66e34b27b4cba5a216bc5" Jan 26 00:20:28 crc kubenswrapper[4975]: E0126 00:20:28.365033 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"378844eb08cc47a4a720edfbe6f8e6c2574adff090f66e34b27b4cba5a216bc5\": container with ID starting with 378844eb08cc47a4a720edfbe6f8e6c2574adff090f66e34b27b4cba5a216bc5 not found: ID does not exist" containerID="378844eb08cc47a4a720edfbe6f8e6c2574adff090f66e34b27b4cba5a216bc5" Jan 26 00:20:28 crc kubenswrapper[4975]: I0126 00:20:28.365071 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"378844eb08cc47a4a720edfbe6f8e6c2574adff090f66e34b27b4cba5a216bc5"} err="failed to get container status \"378844eb08cc47a4a720edfbe6f8e6c2574adff090f66e34b27b4cba5a216bc5\": rpc error: code = NotFound desc = could not find container \"378844eb08cc47a4a720edfbe6f8e6c2574adff090f66e34b27b4cba5a216bc5\": container with ID starting with 378844eb08cc47a4a720edfbe6f8e6c2574adff090f66e34b27b4cba5a216bc5 not found: ID does not exist" Jan 26 00:20:30 crc kubenswrapper[4975]: I0126 00:20:30.154715 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69d5ae7e-3538-4c88-a51c-da93562ba9f8" path="/var/lib/kubelet/pods/69d5ae7e-3538-4c88-a51c-da93562ba9f8/volumes" Jan 26 00:20:32 crc kubenswrapper[4975]: I0126 00:20:32.817973 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww"] Jan 26 00:20:32 crc kubenswrapper[4975]: E0126 00:20:32.819324 4975 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="69d5ae7e-3538-4c88-a51c-da93562ba9f8" containerName="registry-server" Jan 26 00:20:32 crc kubenswrapper[4975]: I0126 00:20:32.819418 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="69d5ae7e-3538-4c88-a51c-da93562ba9f8" containerName="registry-server" Jan 26 00:20:32 crc kubenswrapper[4975]: E0126 00:20:32.819501 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69d5ae7e-3538-4c88-a51c-da93562ba9f8" containerName="extract-utilities" Jan 26 00:20:32 crc kubenswrapper[4975]: I0126 00:20:32.819573 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="69d5ae7e-3538-4c88-a51c-da93562ba9f8" containerName="extract-utilities" Jan 26 00:20:32 crc kubenswrapper[4975]: E0126 00:20:32.819647 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69d5ae7e-3538-4c88-a51c-da93562ba9f8" containerName="extract-content" Jan 26 00:20:32 crc kubenswrapper[4975]: I0126 00:20:32.819725 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="69d5ae7e-3538-4c88-a51c-da93562ba9f8" containerName="extract-content" Jan 26 00:20:32 crc kubenswrapper[4975]: I0126 00:20:32.819938 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="69d5ae7e-3538-4c88-a51c-da93562ba9f8" containerName="registry-server" Jan 26 00:20:32 crc kubenswrapper[4975]: I0126 00:20:32.820992 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww" Jan 26 00:20:32 crc kubenswrapper[4975]: I0126 00:20:32.823854 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 26 00:20:32 crc kubenswrapper[4975]: I0126 00:20:32.834328 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww"] Jan 26 00:20:32 crc kubenswrapper[4975]: I0126 00:20:32.994524 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4cb0fa5f-6596-49f6-8925-d4c56b43a433-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww\" (UID: \"4cb0fa5f-6596-49f6-8925-d4c56b43a433\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww" Jan 26 00:20:32 crc kubenswrapper[4975]: I0126 00:20:32.994580 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4cb0fa5f-6596-49f6-8925-d4c56b43a433-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww\" (UID: \"4cb0fa5f-6596-49f6-8925-d4c56b43a433\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww" Jan 26 00:20:32 crc kubenswrapper[4975]: I0126 00:20:32.994621 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kb9hg\" (UniqueName: \"kubernetes.io/projected/4cb0fa5f-6596-49f6-8925-d4c56b43a433-kube-api-access-kb9hg\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww\" (UID: \"4cb0fa5f-6596-49f6-8925-d4c56b43a433\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww" Jan 26 00:20:33 crc kubenswrapper[4975]: I0126 00:20:33.095756 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/4cb0fa5f-6596-49f6-8925-d4c56b43a433-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww\" (UID: \"4cb0fa5f-6596-49f6-8925-d4c56b43a433\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww" Jan 26 00:20:33 crc kubenswrapper[4975]: I0126 00:20:33.095812 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4cb0fa5f-6596-49f6-8925-d4c56b43a433-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww\" (UID: \"4cb0fa5f-6596-49f6-8925-d4c56b43a433\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww" Jan 26 00:20:33 crc kubenswrapper[4975]: I0126 00:20:33.095842 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kb9hg\" (UniqueName: \"kubernetes.io/projected/4cb0fa5f-6596-49f6-8925-d4c56b43a433-kube-api-access-kb9hg\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww\" (UID: \"4cb0fa5f-6596-49f6-8925-d4c56b43a433\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww" Jan 26 00:20:33 crc kubenswrapper[4975]: I0126 00:20:33.096371 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4cb0fa5f-6596-49f6-8925-d4c56b43a433-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww\" (UID: \"4cb0fa5f-6596-49f6-8925-d4c56b43a433\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww" Jan 26 00:20:33 crc kubenswrapper[4975]: I0126 00:20:33.097033 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4cb0fa5f-6596-49f6-8925-d4c56b43a433-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww\" (UID: \"4cb0fa5f-6596-49f6-8925-d4c56b43a433\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww" Jan 26 00:20:33 crc kubenswrapper[4975]: I0126 00:20:33.116841 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kb9hg\" (UniqueName: \"kubernetes.io/projected/4cb0fa5f-6596-49f6-8925-d4c56b43a433-kube-api-access-kb9hg\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww\" (UID: \"4cb0fa5f-6596-49f6-8925-d4c56b43a433\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww" Jan 26 00:20:33 crc kubenswrapper[4975]: I0126 00:20:33.141330 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww" Jan 26 00:20:33 crc kubenswrapper[4975]: I0126 00:20:33.550854 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww"] Jan 26 00:20:34 crc kubenswrapper[4975]: I0126 00:20:34.433015 4975 generic.go:334] "Generic (PLEG): container finished" podID="4cb0fa5f-6596-49f6-8925-d4c56b43a433" containerID="875dab786d8d3dfea06245944a1e91fe53c4a5741e6b4e2b8ca4b5de45cc6a9c" exitCode=0 Jan 26 00:20:34 crc kubenswrapper[4975]: I0126 00:20:34.433260 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww" event={"ID":"4cb0fa5f-6596-49f6-8925-d4c56b43a433","Type":"ContainerDied","Data":"875dab786d8d3dfea06245944a1e91fe53c4a5741e6b4e2b8ca4b5de45cc6a9c"} Jan 26 00:20:34 crc kubenswrapper[4975]: I0126 00:20:34.433356 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww" event={"ID":"4cb0fa5f-6596-49f6-8925-d4c56b43a433","Type":"ContainerStarted","Data":"a8dc315fbe0083a959775aaf674878d39572e86793761be09ee6ba52ee80bba9"} Jan 26 00:20:34 crc kubenswrapper[4975]: I0126 00:20:34.435762 4975 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 26 00:20:36 crc kubenswrapper[4975]: I0126 00:20:36.175481 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xzvfg"] Jan 26 00:20:36 crc kubenswrapper[4975]: I0126 00:20:36.176528 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xzvfg" Jan 26 00:20:36 crc kubenswrapper[4975]: I0126 00:20:36.189341 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xzvfg"] Jan 26 00:20:36 crc kubenswrapper[4975]: I0126 00:20:36.339582 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ffb7239-96f9-4a22-99af-9defbc219e6b-catalog-content\") pod \"redhat-operators-xzvfg\" (UID: \"1ffb7239-96f9-4a22-99af-9defbc219e6b\") " pod="openshift-marketplace/redhat-operators-xzvfg" Jan 26 00:20:36 crc kubenswrapper[4975]: I0126 00:20:36.339639 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ffb7239-96f9-4a22-99af-9defbc219e6b-utilities\") pod \"redhat-operators-xzvfg\" (UID: \"1ffb7239-96f9-4a22-99af-9defbc219e6b\") " pod="openshift-marketplace/redhat-operators-xzvfg" Jan 26 00:20:36 crc kubenswrapper[4975]: I0126 00:20:36.339708 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p687v\" (UniqueName: \"kubernetes.io/projected/1ffb7239-96f9-4a22-99af-9defbc219e6b-kube-api-access-p687v\") pod \"redhat-operators-xzvfg\" (UID: \"1ffb7239-96f9-4a22-99af-9defbc219e6b\") " pod="openshift-marketplace/redhat-operators-xzvfg" Jan 26 00:20:36 crc kubenswrapper[4975]: I0126 00:20:36.440998 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ffb7239-96f9-4a22-99af-9defbc219e6b-catalog-content\") pod \"redhat-operators-xzvfg\" (UID: 
\"1ffb7239-96f9-4a22-99af-9defbc219e6b\") " pod="openshift-marketplace/redhat-operators-xzvfg" Jan 26 00:20:36 crc kubenswrapper[4975]: I0126 00:20:36.441359 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ffb7239-96f9-4a22-99af-9defbc219e6b-utilities\") pod \"redhat-operators-xzvfg\" (UID: \"1ffb7239-96f9-4a22-99af-9defbc219e6b\") " pod="openshift-marketplace/redhat-operators-xzvfg" Jan 26 00:20:36 crc kubenswrapper[4975]: I0126 00:20:36.441410 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p687v\" (UniqueName: \"kubernetes.io/projected/1ffb7239-96f9-4a22-99af-9defbc219e6b-kube-api-access-p687v\") pod \"redhat-operators-xzvfg\" (UID: \"1ffb7239-96f9-4a22-99af-9defbc219e6b\") " pod="openshift-marketplace/redhat-operators-xzvfg" Jan 26 00:20:36 crc kubenswrapper[4975]: I0126 00:20:36.441493 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ffb7239-96f9-4a22-99af-9defbc219e6b-catalog-content\") pod \"redhat-operators-xzvfg\" (UID: \"1ffb7239-96f9-4a22-99af-9defbc219e6b\") " pod="openshift-marketplace/redhat-operators-xzvfg" Jan 26 00:20:36 crc kubenswrapper[4975]: I0126 00:20:36.441749 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ffb7239-96f9-4a22-99af-9defbc219e6b-utilities\") pod \"redhat-operators-xzvfg\" (UID: \"1ffb7239-96f9-4a22-99af-9defbc219e6b\") " pod="openshift-marketplace/redhat-operators-xzvfg" Jan 26 00:20:36 crc kubenswrapper[4975]: I0126 00:20:36.447897 4975 generic.go:334] "Generic (PLEG): container finished" podID="4cb0fa5f-6596-49f6-8925-d4c56b43a433" containerID="440043f40b791b2ff8f74ea82ae45c4a4ef3443e9a46ad7c9678675e7cfe017b" exitCode=0 Jan 26 00:20:36 crc kubenswrapper[4975]: I0126 00:20:36.447942 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww" event={"ID":"4cb0fa5f-6596-49f6-8925-d4c56b43a433","Type":"ContainerDied","Data":"440043f40b791b2ff8f74ea82ae45c4a4ef3443e9a46ad7c9678675e7cfe017b"} Jan 26 00:20:36 crc kubenswrapper[4975]: I0126 00:20:36.476979 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p687v\" (UniqueName: \"kubernetes.io/projected/1ffb7239-96f9-4a22-99af-9defbc219e6b-kube-api-access-p687v\") pod \"redhat-operators-xzvfg\" (UID: \"1ffb7239-96f9-4a22-99af-9defbc219e6b\") " pod="openshift-marketplace/redhat-operators-xzvfg" Jan 26 00:20:36 crc kubenswrapper[4975]: I0126 00:20:36.510650 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xzvfg" Jan 26 00:20:36 crc kubenswrapper[4975]: I0126 00:20:36.737682 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xzvfg"] Jan 26 00:20:36 crc kubenswrapper[4975]: W0126 00:20:36.739370 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1ffb7239_96f9_4a22_99af_9defbc219e6b.slice/crio-b4530e80ec22539ac1c6422f5d4918506a4933d729826ff269d07de4a47174d7 WatchSource:0}: Error finding container b4530e80ec22539ac1c6422f5d4918506a4933d729826ff269d07de4a47174d7: Status 404 returned error can't find the container with id b4530e80ec22539ac1c6422f5d4918506a4933d729826ff269d07de4a47174d7 Jan 26 00:20:37 crc kubenswrapper[4975]: I0126 00:20:37.454644 4975 generic.go:334] "Generic (PLEG): container finished" podID="1ffb7239-96f9-4a22-99af-9defbc219e6b" containerID="e6dcbabf2d17c43518f38f243e9bb28fd380197ce5b31236bb2d9aa3491816f4" exitCode=0 Jan 26 00:20:37 crc kubenswrapper[4975]: I0126 00:20:37.454796 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xzvfg" event={"ID":"1ffb7239-96f9-4a22-99af-9defbc219e6b","Type":"ContainerDied","Data":"e6dcbabf2d17c43518f38f243e9bb28fd380197ce5b31236bb2d9aa3491816f4"} Jan 26 00:20:37 crc kubenswrapper[4975]: I0126 00:20:37.456360 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xzvfg" event={"ID":"1ffb7239-96f9-4a22-99af-9defbc219e6b","Type":"ContainerStarted","Data":"b4530e80ec22539ac1c6422f5d4918506a4933d729826ff269d07de4a47174d7"} Jan 26 00:20:37 crc kubenswrapper[4975]: I0126 00:20:37.460010 4975 generic.go:334] "Generic (PLEG): container finished" podID="4cb0fa5f-6596-49f6-8925-d4c56b43a433" containerID="7f8f654f1cd6db753f224cf125a652be02a6fe54d35a2b37d33e83d60cead611" exitCode=0 Jan 26 00:20:37 crc kubenswrapper[4975]: I0126 00:20:37.460110 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww" event={"ID":"4cb0fa5f-6596-49f6-8925-d4c56b43a433","Type":"ContainerDied","Data":"7f8f654f1cd6db753f224cf125a652be02a6fe54d35a2b37d33e83d60cead611"} Jan 26 00:20:38 crc kubenswrapper[4975]: I0126 00:20:38.466915 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xzvfg" event={"ID":"1ffb7239-96f9-4a22-99af-9defbc219e6b","Type":"ContainerStarted","Data":"6905210e8785fa468a98c8a63e19db583d0b10ad6e741b19d4f39c8f7b2804ca"} Jan 26 00:20:38 crc kubenswrapper[4975]: I0126 00:20:38.686988 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww" Jan 26 00:20:38 crc kubenswrapper[4975]: I0126 00:20:38.880201 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4cb0fa5f-6596-49f6-8925-d4c56b43a433-util\") pod \"4cb0fa5f-6596-49f6-8925-d4c56b43a433\" (UID: \"4cb0fa5f-6596-49f6-8925-d4c56b43a433\") " Jan 26 00:20:38 crc kubenswrapper[4975]: I0126 00:20:38.880250 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kb9hg\" (UniqueName: \"kubernetes.io/projected/4cb0fa5f-6596-49f6-8925-d4c56b43a433-kube-api-access-kb9hg\") pod \"4cb0fa5f-6596-49f6-8925-d4c56b43a433\" (UID: \"4cb0fa5f-6596-49f6-8925-d4c56b43a433\") " Jan 26 00:20:38 crc kubenswrapper[4975]: I0126 00:20:38.880330 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4cb0fa5f-6596-49f6-8925-d4c56b43a433-bundle\") pod \"4cb0fa5f-6596-49f6-8925-d4c56b43a433\" (UID: \"4cb0fa5f-6596-49f6-8925-d4c56b43a433\") " Jan 26 00:20:38 crc kubenswrapper[4975]: I0126 00:20:38.882795 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4cb0fa5f-6596-49f6-8925-d4c56b43a433-bundle" (OuterVolumeSpecName: "bundle") pod "4cb0fa5f-6596-49f6-8925-d4c56b43a433" (UID: "4cb0fa5f-6596-49f6-8925-d4c56b43a433"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:20:38 crc kubenswrapper[4975]: I0126 00:20:38.886858 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4cb0fa5f-6596-49f6-8925-d4c56b43a433-kube-api-access-kb9hg" (OuterVolumeSpecName: "kube-api-access-kb9hg") pod "4cb0fa5f-6596-49f6-8925-d4c56b43a433" (UID: "4cb0fa5f-6596-49f6-8925-d4c56b43a433"). InnerVolumeSpecName "kube-api-access-kb9hg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:20:38 crc kubenswrapper[4975]: I0126 00:20:38.907495 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4cb0fa5f-6596-49f6-8925-d4c56b43a433-util" (OuterVolumeSpecName: "util") pod "4cb0fa5f-6596-49f6-8925-d4c56b43a433" (UID: "4cb0fa5f-6596-49f6-8925-d4c56b43a433"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:20:38 crc kubenswrapper[4975]: I0126 00:20:38.981929 4975 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4cb0fa5f-6596-49f6-8925-d4c56b43a433-util\") on node \"crc\" DevicePath \"\"" Jan 26 00:20:38 crc kubenswrapper[4975]: I0126 00:20:38.981964 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kb9hg\" (UniqueName: \"kubernetes.io/projected/4cb0fa5f-6596-49f6-8925-d4c56b43a433-kube-api-access-kb9hg\") on node \"crc\" DevicePath \"\"" Jan 26 00:20:38 crc kubenswrapper[4975]: I0126 00:20:38.981985 4975 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4cb0fa5f-6596-49f6-8925-d4c56b43a433-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 00:20:39 crc kubenswrapper[4975]: I0126 00:20:39.216252 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc"] Jan 26 00:20:39 crc kubenswrapper[4975]: E0126 00:20:39.216791 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cb0fa5f-6596-49f6-8925-d4c56b43a433" containerName="pull" Jan 26 00:20:39 crc kubenswrapper[4975]: I0126 00:20:39.216880 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cb0fa5f-6596-49f6-8925-d4c56b43a433" containerName="pull" Jan 26 00:20:39 crc kubenswrapper[4975]: E0126 00:20:39.216951 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cb0fa5f-6596-49f6-8925-d4c56b43a433" containerName="extract" Jan 26 00:20:39 crc kubenswrapper[4975]: I0126 00:20:39.217009 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cb0fa5f-6596-49f6-8925-d4c56b43a433" containerName="extract" Jan 26 00:20:39 crc kubenswrapper[4975]: E0126 00:20:39.217067 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4cb0fa5f-6596-49f6-8925-d4c56b43a433" containerName="util" Jan 26 00:20:39 crc kubenswrapper[4975]: I0126 00:20:39.217137 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="4cb0fa5f-6596-49f6-8925-d4c56b43a433" containerName="util" Jan 26 00:20:39 crc kubenswrapper[4975]: I0126 00:20:39.217336 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="4cb0fa5f-6596-49f6-8925-d4c56b43a433" containerName="extract" Jan 26 00:20:39 crc kubenswrapper[4975]: I0126 00:20:39.218352 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc" Jan 26 00:20:39 crc kubenswrapper[4975]: I0126 00:20:39.232505 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc"] Jan 26 00:20:39 crc kubenswrapper[4975]: I0126 00:20:39.285053 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dd41d02b-525a-4ffd-ace0-ba6fde8853e2-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc\" (UID: \"dd41d02b-525a-4ffd-ace0-ba6fde8853e2\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc" Jan 26 00:20:39 crc kubenswrapper[4975]: I0126 00:20:39.285111 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgcbh\" (UniqueName: \"kubernetes.io/projected/dd41d02b-525a-4ffd-ace0-ba6fde8853e2-kube-api-access-dgcbh\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc\" (UID: \"dd41d02b-525a-4ffd-ace0-ba6fde8853e2\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc" Jan 26 00:20:39 crc kubenswrapper[4975]: I0126 00:20:39.285152 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dd41d02b-525a-4ffd-ace0-ba6fde8853e2-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc\" (UID: \"dd41d02b-525a-4ffd-ace0-ba6fde8853e2\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc" Jan 26 00:20:39 crc kubenswrapper[4975]: I0126 00:20:39.385834 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dd41d02b-525a-4ffd-ace0-ba6fde8853e2-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc\" (UID: \"dd41d02b-525a-4ffd-ace0-ba6fde8853e2\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc" Jan 26 00:20:39 crc kubenswrapper[4975]: I0126 00:20:39.385876 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgcbh\" (UniqueName: \"kubernetes.io/projected/dd41d02b-525a-4ffd-ace0-ba6fde8853e2-kube-api-access-dgcbh\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc\" (UID: \"dd41d02b-525a-4ffd-ace0-ba6fde8853e2\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc" Jan 26 00:20:39 crc kubenswrapper[4975]: I0126 00:20:39.385929 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dd41d02b-525a-4ffd-ace0-ba6fde8853e2-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc\" (UID: \"dd41d02b-525a-4ffd-ace0-ba6fde8853e2\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc" Jan 26 00:20:39 crc kubenswrapper[4975]: I0126 00:20:39.386497 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dd41d02b-525a-4ffd-ace0-ba6fde8853e2-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc\" (UID: \"dd41d02b-525a-4ffd-ace0-ba6fde8853e2\") " 
pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc" Jan 26 00:20:39 crc kubenswrapper[4975]: I0126 00:20:39.386545 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dd41d02b-525a-4ffd-ace0-ba6fde8853e2-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc\" (UID: \"dd41d02b-525a-4ffd-ace0-ba6fde8853e2\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc" Jan 26 00:20:39 crc kubenswrapper[4975]: I0126 00:20:39.405210 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgcbh\" (UniqueName: \"kubernetes.io/projected/dd41d02b-525a-4ffd-ace0-ba6fde8853e2-kube-api-access-dgcbh\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc\" (UID: \"dd41d02b-525a-4ffd-ace0-ba6fde8853e2\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc" Jan 26 00:20:39 crc kubenswrapper[4975]: I0126 00:20:39.474583 4975 generic.go:334] "Generic (PLEG): container finished" podID="1ffb7239-96f9-4a22-99af-9defbc219e6b" containerID="6905210e8785fa468a98c8a63e19db583d0b10ad6e741b19d4f39c8f7b2804ca" exitCode=0 Jan 26 00:20:39 crc kubenswrapper[4975]: I0126 00:20:39.474656 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xzvfg" event={"ID":"1ffb7239-96f9-4a22-99af-9defbc219e6b","Type":"ContainerDied","Data":"6905210e8785fa468a98c8a63e19db583d0b10ad6e741b19d4f39c8f7b2804ca"} Jan 26 00:20:39 crc kubenswrapper[4975]: I0126 00:20:39.482146 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww" event={"ID":"4cb0fa5f-6596-49f6-8925-d4c56b43a433","Type":"ContainerDied","Data":"a8dc315fbe0083a959775aaf674878d39572e86793761be09ee6ba52ee80bba9"} Jan 26 00:20:39 crc kubenswrapper[4975]: I0126 00:20:39.482187 4975 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a8dc315fbe0083a959775aaf674878d39572e86793761be09ee6ba52ee80bba9" Jan 26 00:20:39 crc kubenswrapper[4975]: I0126 00:20:39.482289 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww" Jan 26 00:20:39 crc kubenswrapper[4975]: I0126 00:20:39.530443 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc" Jan 26 00:20:39 crc kubenswrapper[4975]: I0126 00:20:39.714861 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc"] Jan 26 00:20:39 crc kubenswrapper[4975]: W0126 00:20:39.726049 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddd41d02b_525a_4ffd_ace0_ba6fde8853e2.slice/crio-22e337c5cdf4117bf90cffeed57e07a565da0d51f54c165a484e03ed5d376432 WatchSource:0}: Error finding container 22e337c5cdf4117bf90cffeed57e07a565da0d51f54c165a484e03ed5d376432: Status 404 returned error can't find the container with id 22e337c5cdf4117bf90cffeed57e07a565da0d51f54c165a484e03ed5d376432 Jan 26 00:20:40 crc kubenswrapper[4975]: I0126 00:20:40.488890 4975 generic.go:334] "Generic (PLEG): container finished" podID="dd41d02b-525a-4ffd-ace0-ba6fde8853e2" containerID="de7d086434dbe95c2982a76b5950823bd0d1d4b7e1ea6f1e06a23dd2bb427034" exitCode=0 Jan 26 00:20:40 crc kubenswrapper[4975]: I0126 00:20:40.488963 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc" event={"ID":"dd41d02b-525a-4ffd-ace0-ba6fde8853e2","Type":"ContainerDied","Data":"de7d086434dbe95c2982a76b5950823bd0d1d4b7e1ea6f1e06a23dd2bb427034"} Jan 26 00:20:40 crc kubenswrapper[4975]: I0126 00:20:40.488988 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc" event={"ID":"dd41d02b-525a-4ffd-ace0-ba6fde8853e2","Type":"ContainerStarted","Data":"22e337c5cdf4117bf90cffeed57e07a565da0d51f54c165a484e03ed5d376432"} Jan 26 00:20:40 crc kubenswrapper[4975]: I0126 00:20:40.491508 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xzvfg" event={"ID":"1ffb7239-96f9-4a22-99af-9defbc219e6b","Type":"ContainerStarted","Data":"282a38acdd3d60ff8120c0df11f3ed478db217526e12e89258840bb75c656a8a"} Jan 26 00:20:40 crc kubenswrapper[4975]: I0126 00:20:40.529543 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xzvfg" podStartSLOduration=2.079909248 podStartE2EDuration="4.52952434s" podCreationTimestamp="2026-01-26 00:20:36 +0000 UTC" firstStartedPulling="2026-01-26 00:20:37.456294407 +0000 UTC m=+821.577499901" lastFinishedPulling="2026-01-26 00:20:39.905909499 +0000 UTC m=+824.027114993" observedRunningTime="2026-01-26 00:20:40.528497111 +0000 UTC m=+824.649702615" watchObservedRunningTime="2026-01-26 00:20:40.52952434 +0000 UTC m=+824.650729834" Jan 26 00:20:41 crc kubenswrapper[4975]: I0126 00:20:41.499529 4975 generic.go:334] "Generic (PLEG): container finished" podID="dd41d02b-525a-4ffd-ace0-ba6fde8853e2" containerID="e120edbec0b77b3d38786f56cfd07a13c9ead9590b3e92eee7c06fb863ed5ac6" exitCode=0 Jan 26 00:20:41 crc kubenswrapper[4975]: I0126 00:20:41.499614 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc" event={"ID":"dd41d02b-525a-4ffd-ace0-ba6fde8853e2","Type":"ContainerDied","Data":"e120edbec0b77b3d38786f56cfd07a13c9ead9590b3e92eee7c06fb863ed5ac6"} Jan 26 00:20:41 crc kubenswrapper[4975]: I0126 00:20:41.628593 4975 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6"] Jan 26 00:20:41 crc kubenswrapper[4975]: I0126 00:20:41.630491 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6" Jan 26 00:20:41 crc kubenswrapper[4975]: I0126 00:20:41.646946 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6"] Jan 26 00:20:41 crc kubenswrapper[4975]: I0126 00:20:41.720950 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmnp6\" (UniqueName: \"kubernetes.io/projected/e77f2296-579d-4d32-ad8d-d667b1350d50-kube-api-access-cmnp6\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6\" (UID: \"e77f2296-579d-4d32-ad8d-d667b1350d50\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6" Jan 26 00:20:41 crc kubenswrapper[4975]: I0126 00:20:41.721025 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e77f2296-579d-4d32-ad8d-d667b1350d50-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6\" (UID: \"e77f2296-579d-4d32-ad8d-d667b1350d50\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6" Jan 26 00:20:41 crc kubenswrapper[4975]: I0126 00:20:41.721059 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e77f2296-579d-4d32-ad8d-d667b1350d50-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6\" (UID: \"e77f2296-579d-4d32-ad8d-d667b1350d50\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6" Jan 26 00:20:41 crc kubenswrapper[4975]: I0126 00:20:41.822632 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e77f2296-579d-4d32-ad8d-d667b1350d50-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6\" (UID: \"e77f2296-579d-4d32-ad8d-d667b1350d50\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6" Jan 26 00:20:41 crc kubenswrapper[4975]: I0126 00:20:41.822722 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmnp6\" (UniqueName: \"kubernetes.io/projected/e77f2296-579d-4d32-ad8d-d667b1350d50-kube-api-access-cmnp6\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6\" (UID: \"e77f2296-579d-4d32-ad8d-d667b1350d50\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6" Jan 26 00:20:41 crc kubenswrapper[4975]: I0126 00:20:41.822808 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e77f2296-579d-4d32-ad8d-d667b1350d50-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6\" (UID: \"e77f2296-579d-4d32-ad8d-d667b1350d50\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6" Jan 26 00:20:41 crc kubenswrapper[4975]: I0126 00:20:41.823265 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/e77f2296-579d-4d32-ad8d-d667b1350d50-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6\" (UID: \"e77f2296-579d-4d32-ad8d-d667b1350d50\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6" Jan 26 00:20:41 crc kubenswrapper[4975]: I0126 00:20:41.823327 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e77f2296-579d-4d32-ad8d-d667b1350d50-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6\" (UID: \"e77f2296-579d-4d32-ad8d-d667b1350d50\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6" Jan 26 00:20:41 crc kubenswrapper[4975]: I0126 00:20:41.843171 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmnp6\" (UniqueName: \"kubernetes.io/projected/e77f2296-579d-4d32-ad8d-d667b1350d50-kube-api-access-cmnp6\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6\" (UID: \"e77f2296-579d-4d32-ad8d-d667b1350d50\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6" Jan 26 00:20:41 crc kubenswrapper[4975]: I0126 00:20:41.950153 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6" Jan 26 00:20:42 crc kubenswrapper[4975]: I0126 00:20:42.134712 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6"] Jan 26 00:20:42 crc kubenswrapper[4975]: W0126 00:20:42.141951 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode77f2296_579d_4d32_ad8d_d667b1350d50.slice/crio-510b74316f4b730f68ecd589cd861d0fe702cd4a5f2e39f92675af3c5ff57527 WatchSource:0}: Error finding container 510b74316f4b730f68ecd589cd861d0fe702cd4a5f2e39f92675af3c5ff57527: Status 404 returned error can't find the container with id 510b74316f4b730f68ecd589cd861d0fe702cd4a5f2e39f92675af3c5ff57527 Jan 26 00:20:42 crc kubenswrapper[4975]: I0126 00:20:42.505485 4975 generic.go:334] "Generic (PLEG): container finished" podID="e77f2296-579d-4d32-ad8d-d667b1350d50" containerID="7ffe3a78f74b535d153e7575bd55ec5ab26750f4d769a467197e44ca744b8978" exitCode=0 Jan 26 00:20:42 crc kubenswrapper[4975]: I0126 00:20:42.505956 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6" event={"ID":"e77f2296-579d-4d32-ad8d-d667b1350d50","Type":"ContainerDied","Data":"7ffe3a78f74b535d153e7575bd55ec5ab26750f4d769a467197e44ca744b8978"} Jan 26 00:20:42 crc kubenswrapper[4975]: I0126 00:20:42.506989 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6" event={"ID":"e77f2296-579d-4d32-ad8d-d667b1350d50","Type":"ContainerStarted","Data":"510b74316f4b730f68ecd589cd861d0fe702cd4a5f2e39f92675af3c5ff57527"} Jan 26 00:20:42 crc kubenswrapper[4975]: I0126 00:20:42.511550 4975 generic.go:334] "Generic (PLEG): container finished" podID="dd41d02b-525a-4ffd-ace0-ba6fde8853e2" containerID="6d62f2e54e494d0bcb4691c06e3f4c49f304e40f7b77fd98bd4a41606afb7cdd" exitCode=0 Jan 26 00:20:42 crc kubenswrapper[4975]: I0126 00:20:42.511598 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc" event={"ID":"dd41d02b-525a-4ffd-ace0-ba6fde8853e2","Type":"ContainerDied","Data":"6d62f2e54e494d0bcb4691c06e3f4c49f304e40f7b77fd98bd4a41606afb7cdd"} Jan 26 00:20:43 crc kubenswrapper[4975]: I0126 00:20:43.768913 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc" Jan 26 00:20:43 crc kubenswrapper[4975]: I0126 00:20:43.852919 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dd41d02b-525a-4ffd-ace0-ba6fde8853e2-util\") pod \"dd41d02b-525a-4ffd-ace0-ba6fde8853e2\" (UID: \"dd41d02b-525a-4ffd-ace0-ba6fde8853e2\") " Jan 26 00:20:43 crc kubenswrapper[4975]: I0126 00:20:43.853057 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dd41d02b-525a-4ffd-ace0-ba6fde8853e2-bundle\") pod \"dd41d02b-525a-4ffd-ace0-ba6fde8853e2\" (UID: \"dd41d02b-525a-4ffd-ace0-ba6fde8853e2\") " Jan 26 00:20:43 crc kubenswrapper[4975]: I0126 00:20:43.853131 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dgcbh\" (UniqueName: \"kubernetes.io/projected/dd41d02b-525a-4ffd-ace0-ba6fde8853e2-kube-api-access-dgcbh\") pod \"dd41d02b-525a-4ffd-ace0-ba6fde8853e2\" (UID: \"dd41d02b-525a-4ffd-ace0-ba6fde8853e2\") " Jan 26 00:20:43 crc kubenswrapper[4975]: I0126 00:20:43.853997 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd41d02b-525a-4ffd-ace0-ba6fde8853e2-bundle" (OuterVolumeSpecName: "bundle") pod "dd41d02b-525a-4ffd-ace0-ba6fde8853e2" (UID: "dd41d02b-525a-4ffd-ace0-ba6fde8853e2"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:20:43 crc kubenswrapper[4975]: I0126 00:20:43.870360 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dd41d02b-525a-4ffd-ace0-ba6fde8853e2-util" (OuterVolumeSpecName: "util") pod "dd41d02b-525a-4ffd-ace0-ba6fde8853e2" (UID: "dd41d02b-525a-4ffd-ace0-ba6fde8853e2"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:20:43 crc kubenswrapper[4975]: I0126 00:20:43.872475 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd41d02b-525a-4ffd-ace0-ba6fde8853e2-kube-api-access-dgcbh" (OuterVolumeSpecName: "kube-api-access-dgcbh") pod "dd41d02b-525a-4ffd-ace0-ba6fde8853e2" (UID: "dd41d02b-525a-4ffd-ace0-ba6fde8853e2"). InnerVolumeSpecName "kube-api-access-dgcbh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:20:43 crc kubenswrapper[4975]: I0126 00:20:43.955258 4975 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dd41d02b-525a-4ffd-ace0-ba6fde8853e2-util\") on node \"crc\" DevicePath \"\"" Jan 26 00:20:43 crc kubenswrapper[4975]: I0126 00:20:43.955301 4975 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dd41d02b-525a-4ffd-ace0-ba6fde8853e2-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 00:20:43 crc kubenswrapper[4975]: I0126 00:20:43.955317 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dgcbh\" (UniqueName: \"kubernetes.io/projected/dd41d02b-525a-4ffd-ace0-ba6fde8853e2-kube-api-access-dgcbh\") on node \"crc\" DevicePath \"\"" Jan 26 00:20:44 crc kubenswrapper[4975]: I0126 00:20:44.177885 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zbwfk"] Jan 26 00:20:44 crc kubenswrapper[4975]: E0126 00:20:44.178423 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd41d02b-525a-4ffd-ace0-ba6fde8853e2" containerName="pull" Jan 26 00:20:44 crc kubenswrapper[4975]: I0126 00:20:44.178438 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd41d02b-525a-4ffd-ace0-ba6fde8853e2" containerName="pull" Jan 26 00:20:44 crc kubenswrapper[4975]: E0126 00:20:44.178453 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd41d02b-525a-4ffd-ace0-ba6fde8853e2" containerName="util" Jan 26 00:20:44 crc kubenswrapper[4975]: I0126 00:20:44.178460 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd41d02b-525a-4ffd-ace0-ba6fde8853e2" containerName="util" Jan 26 00:20:44 crc kubenswrapper[4975]: E0126 00:20:44.178475 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd41d02b-525a-4ffd-ace0-ba6fde8853e2" containerName="extract" Jan 26 00:20:44 crc kubenswrapper[4975]: I0126 00:20:44.178482 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd41d02b-525a-4ffd-ace0-ba6fde8853e2" containerName="extract" Jan 26 00:20:44 crc kubenswrapper[4975]: I0126 00:20:44.178586 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd41d02b-525a-4ffd-ace0-ba6fde8853e2" containerName="extract" Jan 26 00:20:44 crc kubenswrapper[4975]: I0126 00:20:44.179318 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zbwfk" Jan 26 00:20:44 crc kubenswrapper[4975]: I0126 00:20:44.195954 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zbwfk"] Jan 26 00:20:44 crc kubenswrapper[4975]: I0126 00:20:44.259617 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phg5f\" (UniqueName: \"kubernetes.io/projected/b6e399bb-0385-4ecc-9c50-1dfd2a316ad9-kube-api-access-phg5f\") pod \"certified-operators-zbwfk\" (UID: \"b6e399bb-0385-4ecc-9c50-1dfd2a316ad9\") " pod="openshift-marketplace/certified-operators-zbwfk" Jan 26 00:20:44 crc kubenswrapper[4975]: I0126 00:20:44.259713 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6e399bb-0385-4ecc-9c50-1dfd2a316ad9-catalog-content\") pod \"certified-operators-zbwfk\" (UID: \"b6e399bb-0385-4ecc-9c50-1dfd2a316ad9\") " pod="openshift-marketplace/certified-operators-zbwfk" Jan 26 00:20:44 crc kubenswrapper[4975]: I0126 00:20:44.259750 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6e399bb-0385-4ecc-9c50-1dfd2a316ad9-utilities\") pod \"certified-operators-zbwfk\" (UID: \"b6e399bb-0385-4ecc-9c50-1dfd2a316ad9\") " pod="openshift-marketplace/certified-operators-zbwfk" Jan 26 00:20:44 crc kubenswrapper[4975]: I0126 00:20:44.361174 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6e399bb-0385-4ecc-9c50-1dfd2a316ad9-catalog-content\") pod \"certified-operators-zbwfk\" (UID: \"b6e399bb-0385-4ecc-9c50-1dfd2a316ad9\") " pod="openshift-marketplace/certified-operators-zbwfk" Jan 26 00:20:44 crc kubenswrapper[4975]: I0126 00:20:44.361236 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6e399bb-0385-4ecc-9c50-1dfd2a316ad9-utilities\") pod \"certified-operators-zbwfk\" (UID: \"b6e399bb-0385-4ecc-9c50-1dfd2a316ad9\") " pod="openshift-marketplace/certified-operators-zbwfk" Jan 26 00:20:44 crc kubenswrapper[4975]: I0126 00:20:44.361303 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phg5f\" (UniqueName: \"kubernetes.io/projected/b6e399bb-0385-4ecc-9c50-1dfd2a316ad9-kube-api-access-phg5f\") pod \"certified-operators-zbwfk\" (UID: \"b6e399bb-0385-4ecc-9c50-1dfd2a316ad9\") " pod="openshift-marketplace/certified-operators-zbwfk" Jan 26 00:20:44 crc kubenswrapper[4975]: I0126 00:20:44.361724 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6e399bb-0385-4ecc-9c50-1dfd2a316ad9-utilities\") pod \"certified-operators-zbwfk\" (UID: \"b6e399bb-0385-4ecc-9c50-1dfd2a316ad9\") " pod="openshift-marketplace/certified-operators-zbwfk" Jan 26 00:20:44 crc kubenswrapper[4975]: I0126 00:20:44.362098 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6e399bb-0385-4ecc-9c50-1dfd2a316ad9-catalog-content\") pod \"certified-operators-zbwfk\" (UID: \"b6e399bb-0385-4ecc-9c50-1dfd2a316ad9\") " pod="openshift-marketplace/certified-operators-zbwfk" Jan 26 00:20:44 crc kubenswrapper[4975]: I0126 00:20:44.382214 4975 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-phg5f\" (UniqueName: \"kubernetes.io/projected/b6e399bb-0385-4ecc-9c50-1dfd2a316ad9-kube-api-access-phg5f\") pod \"certified-operators-zbwfk\" (UID: \"b6e399bb-0385-4ecc-9c50-1dfd2a316ad9\") " pod="openshift-marketplace/certified-operators-zbwfk" Jan 26 00:20:44 crc kubenswrapper[4975]: I0126 00:20:44.499962 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zbwfk" Jan 26 00:20:44 crc kubenswrapper[4975]: I0126 00:20:44.524803 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc" event={"ID":"dd41d02b-525a-4ffd-ace0-ba6fde8853e2","Type":"ContainerDied","Data":"22e337c5cdf4117bf90cffeed57e07a565da0d51f54c165a484e03ed5d376432"} Jan 26 00:20:44 crc kubenswrapper[4975]: I0126 00:20:44.524841 4975 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22e337c5cdf4117bf90cffeed57e07a565da0d51f54c165a484e03ed5d376432" Jan 26 00:20:44 crc kubenswrapper[4975]: I0126 00:20:44.524891 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc" Jan 26 00:20:44 crc kubenswrapper[4975]: I0126 00:20:44.978651 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zbwfk"] Jan 26 00:20:44 crc kubenswrapper[4975]: W0126 00:20:44.988988 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb6e399bb_0385_4ecc_9c50_1dfd2a316ad9.slice/crio-31ccc4c8d76387ba109e25a895f7f8cd75b61326524a3adbb1ba3f3d99378945 WatchSource:0}: Error finding container 31ccc4c8d76387ba109e25a895f7f8cd75b61326524a3adbb1ba3f3d99378945: Status 404 returned error can't find the container with id 31ccc4c8d76387ba109e25a895f7f8cd75b61326524a3adbb1ba3f3d99378945 Jan 26 00:20:45 crc kubenswrapper[4975]: I0126 00:20:45.638172 4975 generic.go:334] "Generic (PLEG): container finished" podID="b6e399bb-0385-4ecc-9c50-1dfd2a316ad9" containerID="5a9217fa7470a017dd7d7193962fccb7b961794d1ea43544c86fc5bcd314e8bb" exitCode=0 Jan 26 00:20:45 crc kubenswrapper[4975]: I0126 00:20:45.638425 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbwfk" event={"ID":"b6e399bb-0385-4ecc-9c50-1dfd2a316ad9","Type":"ContainerDied","Data":"5a9217fa7470a017dd7d7193962fccb7b961794d1ea43544c86fc5bcd314e8bb"} Jan 26 00:20:45 crc kubenswrapper[4975]: I0126 00:20:45.638451 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbwfk" event={"ID":"b6e399bb-0385-4ecc-9c50-1dfd2a316ad9","Type":"ContainerStarted","Data":"31ccc4c8d76387ba109e25a895f7f8cd75b61326524a3adbb1ba3f3d99378945"} Jan 26 00:20:46 crc kubenswrapper[4975]: I0126 00:20:46.554711 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xzvfg" Jan 26 00:20:46 crc kubenswrapper[4975]: I0126 00:20:46.555616 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xzvfg" Jan 26 00:20:47 crc kubenswrapper[4975]: I0126 00:20:47.720307 4975 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-xzvfg" podUID="1ffb7239-96f9-4a22-99af-9defbc219e6b" containerName="registry-server" 
probeResult="failure" output=< Jan 26 00:20:47 crc kubenswrapper[4975]: timeout: failed to connect service ":50051" within 1s Jan 26 00:20:47 crc kubenswrapper[4975]: > Jan 26 00:20:52 crc kubenswrapper[4975]: I0126 00:20:52.719823 4975 generic.go:334] "Generic (PLEG): container finished" podID="e77f2296-579d-4d32-ad8d-d667b1350d50" containerID="62adf62148334c16abecba4917458031b36a065dd576095e36763cc3c2130a92" exitCode=0 Jan 26 00:20:52 crc kubenswrapper[4975]: I0126 00:20:52.719908 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6" event={"ID":"e77f2296-579d-4d32-ad8d-d667b1350d50","Type":"ContainerDied","Data":"62adf62148334c16abecba4917458031b36a065dd576095e36763cc3c2130a92"} Jan 26 00:20:52 crc kubenswrapper[4975]: I0126 00:20:52.724179 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbwfk" event={"ID":"b6e399bb-0385-4ecc-9c50-1dfd2a316ad9","Type":"ContainerStarted","Data":"dd627221ca0d35c607262885b60d5bc95b7e3461257c7305a5016d0a944fa1ac"} Jan 26 00:20:53 crc kubenswrapper[4975]: I0126 00:20:53.741035 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6" event={"ID":"e77f2296-579d-4d32-ad8d-d667b1350d50","Type":"ContainerStarted","Data":"b990e13320f1cbf3a95b5a6d65a0914d379d04f25f9bc785ef96f7dd005ad859"} Jan 26 00:20:53 crc kubenswrapper[4975]: I0126 00:20:53.931724 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6" podStartSLOduration=3.716063094 podStartE2EDuration="12.931695679s" podCreationTimestamp="2026-01-26 00:20:41 +0000 UTC" firstStartedPulling="2026-01-26 00:20:42.507440735 +0000 UTC m=+826.628646229" lastFinishedPulling="2026-01-26 00:20:51.72307332 +0000 UTC m=+835.844278814" observedRunningTime="2026-01-26 00:20:53.920569077 +0000 UTC m=+838.041774581" watchObservedRunningTime="2026-01-26 00:20:53.931695679 +0000 UTC m=+838.052901173" Jan 26 00:20:54 crc kubenswrapper[4975]: I0126 00:20:54.563370 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/elastic-operator-86c87b945b-tv582"] Jan 26 00:20:54 crc kubenswrapper[4975]: I0126 00:20:54.564564 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/elastic-operator-86c87b945b-tv582" Jan 26 00:20:54 crc kubenswrapper[4975]: I0126 00:20:54.583105 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"openshift-service-ca.crt" Jan 26 00:20:54 crc kubenswrapper[4975]: I0126 00:20:54.584047 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elastic-operator-service-cert" Jan 26 00:20:54 crc kubenswrapper[4975]: I0126 00:20:54.584136 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"kube-root-ca.crt" Jan 26 00:20:54 crc kubenswrapper[4975]: I0126 00:20:54.584183 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elastic-operator-dockercfg-4bxvj" Jan 26 00:20:54 crc kubenswrapper[4975]: I0126 00:20:54.613431 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elastic-operator-86c87b945b-tv582"] Jan 26 00:20:54 crc kubenswrapper[4975]: I0126 00:20:54.653131 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9fw9\" (UniqueName: \"kubernetes.io/projected/7994d811-7481-4dc1-bb32-0d8cb5cde38f-kube-api-access-l9fw9\") pod \"elastic-operator-86c87b945b-tv582\" (UID: \"7994d811-7481-4dc1-bb32-0d8cb5cde38f\") " pod="service-telemetry/elastic-operator-86c87b945b-tv582" Jan 26 00:20:54 crc kubenswrapper[4975]: I0126 00:20:54.653363 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7994d811-7481-4dc1-bb32-0d8cb5cde38f-apiservice-cert\") pod \"elastic-operator-86c87b945b-tv582\" (UID: \"7994d811-7481-4dc1-bb32-0d8cb5cde38f\") " pod="service-telemetry/elastic-operator-86c87b945b-tv582" Jan 26 00:20:54 crc kubenswrapper[4975]: I0126 00:20:54.653504 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7994d811-7481-4dc1-bb32-0d8cb5cde38f-webhook-cert\") pod \"elastic-operator-86c87b945b-tv582\" (UID: \"7994d811-7481-4dc1-bb32-0d8cb5cde38f\") " pod="service-telemetry/elastic-operator-86c87b945b-tv582" Jan 26 00:20:54 crc kubenswrapper[4975]: I0126 00:20:54.750822 4975 generic.go:334] "Generic (PLEG): container finished" podID="b6e399bb-0385-4ecc-9c50-1dfd2a316ad9" containerID="dd627221ca0d35c607262885b60d5bc95b7e3461257c7305a5016d0a944fa1ac" exitCode=0 Jan 26 00:20:54 crc kubenswrapper[4975]: I0126 00:20:54.750927 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbwfk" event={"ID":"b6e399bb-0385-4ecc-9c50-1dfd2a316ad9","Type":"ContainerDied","Data":"dd627221ca0d35c607262885b60d5bc95b7e3461257c7305a5016d0a944fa1ac"} Jan 26 00:20:54 crc kubenswrapper[4975]: I0126 00:20:54.754325 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7994d811-7481-4dc1-bb32-0d8cb5cde38f-apiservice-cert\") pod \"elastic-operator-86c87b945b-tv582\" (UID: \"7994d811-7481-4dc1-bb32-0d8cb5cde38f\") " pod="service-telemetry/elastic-operator-86c87b945b-tv582" Jan 26 00:20:54 crc kubenswrapper[4975]: I0126 00:20:54.754395 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7994d811-7481-4dc1-bb32-0d8cb5cde38f-webhook-cert\") pod \"elastic-operator-86c87b945b-tv582\" (UID: 
\"7994d811-7481-4dc1-bb32-0d8cb5cde38f\") " pod="service-telemetry/elastic-operator-86c87b945b-tv582" Jan 26 00:20:54 crc kubenswrapper[4975]: I0126 00:20:54.754449 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9fw9\" (UniqueName: \"kubernetes.io/projected/7994d811-7481-4dc1-bb32-0d8cb5cde38f-kube-api-access-l9fw9\") pod \"elastic-operator-86c87b945b-tv582\" (UID: \"7994d811-7481-4dc1-bb32-0d8cb5cde38f\") " pod="service-telemetry/elastic-operator-86c87b945b-tv582" Jan 26 00:20:54 crc kubenswrapper[4975]: I0126 00:20:54.757044 4975 generic.go:334] "Generic (PLEG): container finished" podID="e77f2296-579d-4d32-ad8d-d667b1350d50" containerID="b990e13320f1cbf3a95b5a6d65a0914d379d04f25f9bc785ef96f7dd005ad859" exitCode=0 Jan 26 00:20:54 crc kubenswrapper[4975]: I0126 00:20:54.757099 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6" event={"ID":"e77f2296-579d-4d32-ad8d-d667b1350d50","Type":"ContainerDied","Data":"b990e13320f1cbf3a95b5a6d65a0914d379d04f25f9bc785ef96f7dd005ad859"} Jan 26 00:20:54 crc kubenswrapper[4975]: I0126 00:20:54.764042 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/7994d811-7481-4dc1-bb32-0d8cb5cde38f-webhook-cert\") pod \"elastic-operator-86c87b945b-tv582\" (UID: \"7994d811-7481-4dc1-bb32-0d8cb5cde38f\") " pod="service-telemetry/elastic-operator-86c87b945b-tv582" Jan 26 00:20:54 crc kubenswrapper[4975]: I0126 00:20:54.765407 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/7994d811-7481-4dc1-bb32-0d8cb5cde38f-apiservice-cert\") pod \"elastic-operator-86c87b945b-tv582\" (UID: \"7994d811-7481-4dc1-bb32-0d8cb5cde38f\") " pod="service-telemetry/elastic-operator-86c87b945b-tv582" Jan 26 00:20:54 crc kubenswrapper[4975]: I0126 00:20:54.805587 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9fw9\" (UniqueName: \"kubernetes.io/projected/7994d811-7481-4dc1-bb32-0d8cb5cde38f-kube-api-access-l9fw9\") pod \"elastic-operator-86c87b945b-tv582\" (UID: \"7994d811-7481-4dc1-bb32-0d8cb5cde38f\") " pod="service-telemetry/elastic-operator-86c87b945b-tv582" Jan 26 00:20:54 crc kubenswrapper[4975]: I0126 00:20:54.886401 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/elastic-operator-86c87b945b-tv582" Jan 26 00:20:55 crc kubenswrapper[4975]: I0126 00:20:55.768331 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elastic-operator-86c87b945b-tv582"] Jan 26 00:20:55 crc kubenswrapper[4975]: I0126 00:20:55.788200 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elastic-operator-86c87b945b-tv582" event={"ID":"7994d811-7481-4dc1-bb32-0d8cb5cde38f","Type":"ContainerStarted","Data":"4c005abe4d64ec0cd48c1e1d9d3b342ef87fb125d1243fc58121abe758130a2e"} Jan 26 00:20:55 crc kubenswrapper[4975]: I0126 00:20:55.792770 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbwfk" event={"ID":"b6e399bb-0385-4ecc-9c50-1dfd2a316ad9","Type":"ContainerStarted","Data":"1724845fa86d62d9740b321f1ebe9c58d74dc9684b1587a46c8aa37a500e3887"} Jan 26 00:20:55 crc kubenswrapper[4975]: I0126 00:20:55.821001 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zbwfk" podStartSLOduration=7.650223416 podStartE2EDuration="11.820982048s" podCreationTimestamp="2026-01-26 00:20:44 +0000 UTC" firstStartedPulling="2026-01-26 00:20:51.159159285 +0000 UTC m=+835.280364779" lastFinishedPulling="2026-01-26 00:20:55.329917917 +0000 UTC m=+839.451123411" observedRunningTime="2026-01-26 00:20:55.819673881 +0000 UTC m=+839.940879375" watchObservedRunningTime="2026-01-26 00:20:55.820982048 +0000 UTC m=+839.942187542" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.114496 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.276674 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e77f2296-579d-4d32-ad8d-d667b1350d50-bundle\") pod \"e77f2296-579d-4d32-ad8d-d667b1350d50\" (UID: \"e77f2296-579d-4d32-ad8d-d667b1350d50\") " Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.277056 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cmnp6\" (UniqueName: \"kubernetes.io/projected/e77f2296-579d-4d32-ad8d-d667b1350d50-kube-api-access-cmnp6\") pod \"e77f2296-579d-4d32-ad8d-d667b1350d50\" (UID: \"e77f2296-579d-4d32-ad8d-d667b1350d50\") " Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.277171 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e77f2296-579d-4d32-ad8d-d667b1350d50-util\") pod \"e77f2296-579d-4d32-ad8d-d667b1350d50\" (UID: \"e77f2296-579d-4d32-ad8d-d667b1350d50\") " Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.277838 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e77f2296-579d-4d32-ad8d-d667b1350d50-bundle" (OuterVolumeSpecName: "bundle") pod "e77f2296-579d-4d32-ad8d-d667b1350d50" (UID: "e77f2296-579d-4d32-ad8d-d667b1350d50"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.287525 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e77f2296-579d-4d32-ad8d-d667b1350d50-util" (OuterVolumeSpecName: "util") pod "e77f2296-579d-4d32-ad8d-d667b1350d50" (UID: "e77f2296-579d-4d32-ad8d-d667b1350d50"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.292457 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e77f2296-579d-4d32-ad8d-d667b1350d50-kube-api-access-cmnp6" (OuterVolumeSpecName: "kube-api-access-cmnp6") pod "e77f2296-579d-4d32-ad8d-d667b1350d50" (UID: "e77f2296-579d-4d32-ad8d-d667b1350d50"). InnerVolumeSpecName "kube-api-access-cmnp6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.379395 4975 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e77f2296-579d-4d32-ad8d-d667b1350d50-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.379443 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cmnp6\" (UniqueName: \"kubernetes.io/projected/e77f2296-579d-4d32-ad8d-d667b1350d50-kube-api-access-cmnp6\") on node \"crc\" DevicePath \"\"" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.379462 4975 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e77f2296-579d-4d32-ad8d-d667b1350d50-util\") on node \"crc\" DevicePath \"\"" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.566622 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xzvfg" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.622616 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xzvfg" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.759847 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-cps6q"] Jan 26 00:20:56 crc kubenswrapper[4975]: E0126 00:20:56.760409 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e77f2296-579d-4d32-ad8d-d667b1350d50" containerName="pull" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.760512 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="e77f2296-579d-4d32-ad8d-d667b1350d50" containerName="pull" Jan 26 00:20:56 crc kubenswrapper[4975]: E0126 00:20:56.760571 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e77f2296-579d-4d32-ad8d-d667b1350d50" containerName="extract" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.760620 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="e77f2296-579d-4d32-ad8d-d667b1350d50" containerName="extract" Jan 26 00:20:56 crc kubenswrapper[4975]: E0126 00:20:56.760694 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e77f2296-579d-4d32-ad8d-d667b1350d50" containerName="util" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.762123 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="e77f2296-579d-4d32-ad8d-d667b1350d50" containerName="util" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.762356 4975 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="e77f2296-579d-4d32-ad8d-d667b1350d50" containerName="extract" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.762975 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-cps6q" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.768422 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.768619 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-72945" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.768801 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.771180 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-cps6q"] Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.791606 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mxwf\" (UniqueName: \"kubernetes.io/projected/9523511a-d66c-42d3-86b2-5572b83ed21e-kube-api-access-6mxwf\") pod \"obo-prometheus-operator-68bc856cb9-cps6q\" (UID: \"9523511a-d66c-42d3-86b2-5572b83ed21e\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-cps6q" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.807317 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.807794 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6" event={"ID":"e77f2296-579d-4d32-ad8d-d667b1350d50","Type":"ContainerDied","Data":"510b74316f4b730f68ecd589cd861d0fe702cd4a5f2e39f92675af3c5ff57527"} Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.807833 4975 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="510b74316f4b730f68ecd589cd861d0fe702cd4a5f2e39f92675af3c5ff57527" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.892389 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mxwf\" (UniqueName: \"kubernetes.io/projected/9523511a-d66c-42d3-86b2-5572b83ed21e-kube-api-access-6mxwf\") pod \"obo-prometheus-operator-68bc856cb9-cps6q\" (UID: \"9523511a-d66c-42d3-86b2-5572b83ed21e\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-cps6q" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.927489 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mxwf\" (UniqueName: \"kubernetes.io/projected/9523511a-d66c-42d3-86b2-5572b83ed21e-kube-api-access-6mxwf\") pod \"obo-prometheus-operator-68bc856cb9-cps6q\" (UID: \"9523511a-d66c-42d3-86b2-5572b83ed21e\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-cps6q" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.936089 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n"] Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.942582 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.945354 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-hz4nm" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.945587 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.957172 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-ltxgv"] Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.957955 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-ltxgv" Jan 26 00:20:56 crc kubenswrapper[4975]: I0126 00:20:56.970015 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n"] Jan 26 00:20:57 crc kubenswrapper[4975]: E0126 00:20:56.979163 4975 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode77f2296_579d_4d32_ad8d_d667b1350d50.slice/crio-510b74316f4b730f68ecd589cd861d0fe702cd4a5f2e39f92675af3c5ff57527\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode77f2296_579d_4d32_ad8d_d667b1350d50.slice\": RecentStats: unable to find data in memory cache]" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.076891 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-ltxgv"] Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.092059 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-cps6q" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.129568 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f403cd64-91dd-45f8-b0b0-981505389e7a-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n\" (UID: \"f403cd64-91dd-45f8-b0b0-981505389e7a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.129618 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f01df3f6-766e-45bc-b28b-5fbf18581625-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-59b48b699b-ltxgv\" (UID: \"f01df3f6-766e-45bc-b28b-5fbf18581625\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-ltxgv" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.129671 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f01df3f6-766e-45bc-b28b-5fbf18581625-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-59b48b699b-ltxgv\" (UID: \"f01df3f6-766e-45bc-b28b-5fbf18581625\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-ltxgv" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.129704 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f403cd64-91dd-45f8-b0b0-981505389e7a-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n\" (UID: \"f403cd64-91dd-45f8-b0b0-981505389e7a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.148555 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-gbqn9"] Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.149492 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-gbqn9" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.152715 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-vpj67" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.152797 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.183098 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-gbqn9"] Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.230808 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f403cd64-91dd-45f8-b0b0-981505389e7a-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n\" (UID: \"f403cd64-91dd-45f8-b0b0-981505389e7a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.230885 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f403cd64-91dd-45f8-b0b0-981505389e7a-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n\" (UID: \"f403cd64-91dd-45f8-b0b0-981505389e7a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.230905 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f01df3f6-766e-45bc-b28b-5fbf18581625-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-59b48b699b-ltxgv\" (UID: \"f01df3f6-766e-45bc-b28b-5fbf18581625\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-ltxgv" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.230952 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f01df3f6-766e-45bc-b28b-5fbf18581625-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-59b48b699b-ltxgv\" (UID: \"f01df3f6-766e-45bc-b28b-5fbf18581625\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-ltxgv" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.234659 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f01df3f6-766e-45bc-b28b-5fbf18581625-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-59b48b699b-ltxgv\" (UID: \"f01df3f6-766e-45bc-b28b-5fbf18581625\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-ltxgv" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.237490 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f01df3f6-766e-45bc-b28b-5fbf18581625-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-59b48b699b-ltxgv\" (UID: \"f01df3f6-766e-45bc-b28b-5fbf18581625\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-ltxgv" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.240608 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/f403cd64-91dd-45f8-b0b0-981505389e7a-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n\" (UID: \"f403cd64-91dd-45f8-b0b0-981505389e7a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.254971 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f403cd64-91dd-45f8-b0b0-981505389e7a-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n\" (UID: \"f403cd64-91dd-45f8-b0b0-981505389e7a\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.333904 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/47b2b3cb-f050-438c-b2d4-2ed6b594fad9-observability-operator-tls\") pod \"observability-operator-59bdc8b94-gbqn9\" (UID: \"47b2b3cb-f050-438c-b2d4-2ed6b594fad9\") " pod="openshift-operators/observability-operator-59bdc8b94-gbqn9" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.333958 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2llj\" (UniqueName: \"kubernetes.io/projected/47b2b3cb-f050-438c-b2d4-2ed6b594fad9-kube-api-access-n2llj\") pod \"observability-operator-59bdc8b94-gbqn9\" (UID: \"47b2b3cb-f050-438c-b2d4-2ed6b594fad9\") " pod="openshift-operators/observability-operator-59bdc8b94-gbqn9" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.340136 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.363036 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-ltxgv" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.398579 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-6tfl5"] Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.399566 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-6tfl5" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.409051 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-t5t4l" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.419568 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-6tfl5"] Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.438891 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/47b2b3cb-f050-438c-b2d4-2ed6b594fad9-observability-operator-tls\") pod \"observability-operator-59bdc8b94-gbqn9\" (UID: \"47b2b3cb-f050-438c-b2d4-2ed6b594fad9\") " pod="openshift-operators/observability-operator-59bdc8b94-gbqn9" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.438960 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2llj\" (UniqueName: \"kubernetes.io/projected/47b2b3cb-f050-438c-b2d4-2ed6b594fad9-kube-api-access-n2llj\") pod \"observability-operator-59bdc8b94-gbqn9\" (UID: \"47b2b3cb-f050-438c-b2d4-2ed6b594fad9\") " pod="openshift-operators/observability-operator-59bdc8b94-gbqn9" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.439026 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/93742037-1757-4c1c-b40d-c1e6065bdf8c-openshift-service-ca\") pod \"perses-operator-5bf474d74f-6tfl5\" (UID: \"93742037-1757-4c1c-b40d-c1e6065bdf8c\") " pod="openshift-operators/perses-operator-5bf474d74f-6tfl5" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.439519 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlssb\" (UniqueName: \"kubernetes.io/projected/93742037-1757-4c1c-b40d-c1e6065bdf8c-kube-api-access-mlssb\") pod \"perses-operator-5bf474d74f-6tfl5\" (UID: \"93742037-1757-4c1c-b40d-c1e6065bdf8c\") " pod="openshift-operators/perses-operator-5bf474d74f-6tfl5" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.450514 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/47b2b3cb-f050-438c-b2d4-2ed6b594fad9-observability-operator-tls\") pod \"observability-operator-59bdc8b94-gbqn9\" (UID: \"47b2b3cb-f050-438c-b2d4-2ed6b594fad9\") " pod="openshift-operators/observability-operator-59bdc8b94-gbqn9" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.468157 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2llj\" (UniqueName: \"kubernetes.io/projected/47b2b3cb-f050-438c-b2d4-2ed6b594fad9-kube-api-access-n2llj\") pod \"observability-operator-59bdc8b94-gbqn9\" (UID: \"47b2b3cb-f050-438c-b2d4-2ed6b594fad9\") " pod="openshift-operators/observability-operator-59bdc8b94-gbqn9" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.487941 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-gbqn9" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.526562 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-cps6q"] Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.540255 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlssb\" (UniqueName: \"kubernetes.io/projected/93742037-1757-4c1c-b40d-c1e6065bdf8c-kube-api-access-mlssb\") pod \"perses-operator-5bf474d74f-6tfl5\" (UID: \"93742037-1757-4c1c-b40d-c1e6065bdf8c\") " pod="openshift-operators/perses-operator-5bf474d74f-6tfl5" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.540354 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/93742037-1757-4c1c-b40d-c1e6065bdf8c-openshift-service-ca\") pod \"perses-operator-5bf474d74f-6tfl5\" (UID: \"93742037-1757-4c1c-b40d-c1e6065bdf8c\") " pod="openshift-operators/perses-operator-5bf474d74f-6tfl5" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.541399 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/93742037-1757-4c1c-b40d-c1e6065bdf8c-openshift-service-ca\") pod \"perses-operator-5bf474d74f-6tfl5\" (UID: \"93742037-1757-4c1c-b40d-c1e6065bdf8c\") " pod="openshift-operators/perses-operator-5bf474d74f-6tfl5" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.561527 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlssb\" (UniqueName: \"kubernetes.io/projected/93742037-1757-4c1c-b40d-c1e6065bdf8c-kube-api-access-mlssb\") pod \"perses-operator-5bf474d74f-6tfl5\" (UID: \"93742037-1757-4c1c-b40d-c1e6065bdf8c\") " pod="openshift-operators/perses-operator-5bf474d74f-6tfl5" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.779752 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-6tfl5" Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.820038 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-cps6q" event={"ID":"9523511a-d66c-42d3-86b2-5572b83ed21e","Type":"ContainerStarted","Data":"c27c2f61040e6d8e9033f73a336abd12a2cd91d98b7555a56c22a3b819e8f9e3"} Jan 26 00:20:57 crc kubenswrapper[4975]: I0126 00:20:57.965047 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-gbqn9"] Jan 26 00:20:57 crc kubenswrapper[4975]: W0126 00:20:57.989529 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod47b2b3cb_f050_438c_b2d4_2ed6b594fad9.slice/crio-b146899d84f10ddfc117c5172d2cb86747523518d39a13fa31fbeef69cfc683e WatchSource:0}: Error finding container b146899d84f10ddfc117c5172d2cb86747523518d39a13fa31fbeef69cfc683e: Status 404 returned error can't find the container with id b146899d84f10ddfc117c5172d2cb86747523518d39a13fa31fbeef69cfc683e Jan 26 00:20:58 crc kubenswrapper[4975]: I0126 00:20:58.030964 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n"] Jan 26 00:20:58 crc kubenswrapper[4975]: I0126 00:20:58.036573 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-ltxgv"] Jan 26 00:20:58 crc kubenswrapper[4975]: I0126 00:20:58.274668 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-6tfl5"] Jan 26 00:20:58 crc kubenswrapper[4975]: W0126 00:20:58.278128 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod93742037_1757_4c1c_b40d_c1e6065bdf8c.slice/crio-f00ae8eb1b3bd5494605b369302e36271edc74cbb2f36c1f4c224ac22cc05fe3 WatchSource:0}: Error finding container f00ae8eb1b3bd5494605b369302e36271edc74cbb2f36c1f4c224ac22cc05fe3: Status 404 returned error can't find the container with id f00ae8eb1b3bd5494605b369302e36271edc74cbb2f36c1f4c224ac22cc05fe3 Jan 26 00:20:58 crc kubenswrapper[4975]: I0126 00:20:58.779267 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xzvfg"] Jan 26 00:20:58 crc kubenswrapper[4975]: I0126 00:20:58.779558 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xzvfg" podUID="1ffb7239-96f9-4a22-99af-9defbc219e6b" containerName="registry-server" containerID="cri-o://282a38acdd3d60ff8120c0df11f3ed478db217526e12e89258840bb75c656a8a" gracePeriod=2 Jan 26 00:20:58 crc kubenswrapper[4975]: I0126 00:20:58.833852 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-gbqn9" event={"ID":"47b2b3cb-f050-438c-b2d4-2ed6b594fad9","Type":"ContainerStarted","Data":"b146899d84f10ddfc117c5172d2cb86747523518d39a13fa31fbeef69cfc683e"} Jan 26 00:20:58 crc kubenswrapper[4975]: I0126 00:20:58.836642 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-6tfl5" event={"ID":"93742037-1757-4c1c-b40d-c1e6065bdf8c","Type":"ContainerStarted","Data":"f00ae8eb1b3bd5494605b369302e36271edc74cbb2f36c1f4c224ac22cc05fe3"} Jan 26 00:20:58 crc kubenswrapper[4975]: I0126 00:20:58.838159 4975 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-ltxgv" event={"ID":"f01df3f6-766e-45bc-b28b-5fbf18581625","Type":"ContainerStarted","Data":"3863e885081e66815df3fb02723b77a74c1a5b60825d9af5c2bdf787fd1b3835"} Jan 26 00:20:58 crc kubenswrapper[4975]: I0126 00:20:58.843009 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n" event={"ID":"f403cd64-91dd-45f8-b0b0-981505389e7a","Type":"ContainerStarted","Data":"715d0fb6b53a7dde7539b612784877623bd71cec7a45185d9d65c6acec927da5"} Jan 26 00:20:59 crc kubenswrapper[4975]: I0126 00:20:59.843785 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xzvfg" Jan 26 00:20:59 crc kubenswrapper[4975]: I0126 00:20:59.872010 4975 generic.go:334] "Generic (PLEG): container finished" podID="1ffb7239-96f9-4a22-99af-9defbc219e6b" containerID="282a38acdd3d60ff8120c0df11f3ed478db217526e12e89258840bb75c656a8a" exitCode=0 Jan 26 00:20:59 crc kubenswrapper[4975]: I0126 00:20:59.872066 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xzvfg" event={"ID":"1ffb7239-96f9-4a22-99af-9defbc219e6b","Type":"ContainerDied","Data":"282a38acdd3d60ff8120c0df11f3ed478db217526e12e89258840bb75c656a8a"} Jan 26 00:20:59 crc kubenswrapper[4975]: I0126 00:20:59.872092 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xzvfg" Jan 26 00:20:59 crc kubenswrapper[4975]: I0126 00:20:59.872134 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xzvfg" event={"ID":"1ffb7239-96f9-4a22-99af-9defbc219e6b","Type":"ContainerDied","Data":"b4530e80ec22539ac1c6422f5d4918506a4933d729826ff269d07de4a47174d7"} Jan 26 00:20:59 crc kubenswrapper[4975]: I0126 00:20:59.872164 4975 scope.go:117] "RemoveContainer" containerID="282a38acdd3d60ff8120c0df11f3ed478db217526e12e89258840bb75c656a8a" Jan 26 00:20:59 crc kubenswrapper[4975]: I0126 00:20:59.888174 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ffb7239-96f9-4a22-99af-9defbc219e6b-catalog-content\") pod \"1ffb7239-96f9-4a22-99af-9defbc219e6b\" (UID: \"1ffb7239-96f9-4a22-99af-9defbc219e6b\") " Jan 26 00:20:59 crc kubenswrapper[4975]: I0126 00:20:59.888228 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p687v\" (UniqueName: \"kubernetes.io/projected/1ffb7239-96f9-4a22-99af-9defbc219e6b-kube-api-access-p687v\") pod \"1ffb7239-96f9-4a22-99af-9defbc219e6b\" (UID: \"1ffb7239-96f9-4a22-99af-9defbc219e6b\") " Jan 26 00:20:59 crc kubenswrapper[4975]: I0126 00:20:59.888293 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ffb7239-96f9-4a22-99af-9defbc219e6b-utilities\") pod \"1ffb7239-96f9-4a22-99af-9defbc219e6b\" (UID: \"1ffb7239-96f9-4a22-99af-9defbc219e6b\") " Jan 26 00:20:59 crc kubenswrapper[4975]: I0126 00:20:59.889346 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ffb7239-96f9-4a22-99af-9defbc219e6b-utilities" (OuterVolumeSpecName: "utilities") pod "1ffb7239-96f9-4a22-99af-9defbc219e6b" (UID: "1ffb7239-96f9-4a22-99af-9defbc219e6b"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:20:59 crc kubenswrapper[4975]: I0126 00:20:59.911041 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ffb7239-96f9-4a22-99af-9defbc219e6b-kube-api-access-p687v" (OuterVolumeSpecName: "kube-api-access-p687v") pod "1ffb7239-96f9-4a22-99af-9defbc219e6b" (UID: "1ffb7239-96f9-4a22-99af-9defbc219e6b"). InnerVolumeSpecName "kube-api-access-p687v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:20:59 crc kubenswrapper[4975]: I0126 00:20:59.990206 4975 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1ffb7239-96f9-4a22-99af-9defbc219e6b-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 00:20:59 crc kubenswrapper[4975]: I0126 00:20:59.990263 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p687v\" (UniqueName: \"kubernetes.io/projected/1ffb7239-96f9-4a22-99af-9defbc219e6b-kube-api-access-p687v\") on node \"crc\" DevicePath \"\"" Jan 26 00:21:00 crc kubenswrapper[4975]: I0126 00:21:00.025840 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1ffb7239-96f9-4a22-99af-9defbc219e6b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1ffb7239-96f9-4a22-99af-9defbc219e6b" (UID: "1ffb7239-96f9-4a22-99af-9defbc219e6b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:21:00 crc kubenswrapper[4975]: I0126 00:21:00.095992 4975 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1ffb7239-96f9-4a22-99af-9defbc219e6b-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 00:21:00 crc kubenswrapper[4975]: I0126 00:21:00.289352 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xzvfg"] Jan 26 00:21:00 crc kubenswrapper[4975]: I0126 00:21:00.315885 4975 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xzvfg"] Jan 26 00:21:02 crc kubenswrapper[4975]: I0126 00:21:02.161153 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ffb7239-96f9-4a22-99af-9defbc219e6b" path="/var/lib/kubelet/pods/1ffb7239-96f9-4a22-99af-9defbc219e6b/volumes" Jan 26 00:21:03 crc kubenswrapper[4975]: I0126 00:21:03.341751 4975 scope.go:117] "RemoveContainer" containerID="6905210e8785fa468a98c8a63e19db583d0b10ad6e741b19d4f39c8f7b2804ca" Jan 26 00:21:04 crc kubenswrapper[4975]: I0126 00:21:04.503957 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zbwfk" Jan 26 00:21:04 crc kubenswrapper[4975]: I0126 00:21:04.504039 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zbwfk" Jan 26 00:21:04 crc kubenswrapper[4975]: I0126 00:21:04.686071 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zbwfk" Jan 26 00:21:05 crc kubenswrapper[4975]: I0126 00:21:05.094702 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zbwfk" Jan 26 00:21:08 crc kubenswrapper[4975]: I0126 00:21:08.372302 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zbwfk"] Jan 26 00:21:08 crc kubenswrapper[4975]: I0126 00:21:08.379103 4975 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zbwfk" podUID="b6e399bb-0385-4ecc-9c50-1dfd2a316ad9" containerName="registry-server" containerID="cri-o://1724845fa86d62d9740b321f1ebe9c58d74dc9684b1587a46c8aa37a500e3887" gracePeriod=2 Jan 26 00:21:09 crc kubenswrapper[4975]: I0126 00:21:09.004490 4975 generic.go:334] "Generic (PLEG): container finished" podID="b6e399bb-0385-4ecc-9c50-1dfd2a316ad9" containerID="1724845fa86d62d9740b321f1ebe9c58d74dc9684b1587a46c8aa37a500e3887" exitCode=0 Jan 26 00:21:09 crc kubenswrapper[4975]: I0126 00:21:09.004567 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbwfk" event={"ID":"b6e399bb-0385-4ecc-9c50-1dfd2a316ad9","Type":"ContainerDied","Data":"1724845fa86d62d9740b321f1ebe9c58d74dc9684b1587a46c8aa37a500e3887"} Jan 26 00:21:09 crc kubenswrapper[4975]: I0126 00:21:09.248772 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-v5rv7"] Jan 26 00:21:09 crc kubenswrapper[4975]: E0126 00:21:09.249171 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ffb7239-96f9-4a22-99af-9defbc219e6b" containerName="registry-server" Jan 26 00:21:09 crc kubenswrapper[4975]: I0126 00:21:09.249196 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ffb7239-96f9-4a22-99af-9defbc219e6b" containerName="registry-server" Jan 26 00:21:09 crc kubenswrapper[4975]: E0126 00:21:09.249210 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ffb7239-96f9-4a22-99af-9defbc219e6b" containerName="extract-content" Jan 26 00:21:09 crc kubenswrapper[4975]: I0126 00:21:09.249218 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ffb7239-96f9-4a22-99af-9defbc219e6b" containerName="extract-content" Jan 26 00:21:09 crc kubenswrapper[4975]: E0126 00:21:09.249229 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ffb7239-96f9-4a22-99af-9defbc219e6b" containerName="extract-utilities" Jan 26 00:21:09 crc kubenswrapper[4975]: I0126 00:21:09.249238 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ffb7239-96f9-4a22-99af-9defbc219e6b" containerName="extract-utilities" Jan 26 00:21:09 crc kubenswrapper[4975]: I0126 00:21:09.249369 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ffb7239-96f9-4a22-99af-9defbc219e6b" containerName="registry-server" Jan 26 00:21:09 crc kubenswrapper[4975]: I0126 00:21:09.249969 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-v5rv7" Jan 26 00:21:09 crc kubenswrapper[4975]: I0126 00:21:09.256690 4975 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-26g5r" Jan 26 00:21:09 crc kubenswrapper[4975]: I0126 00:21:09.256950 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Jan 26 00:21:09 crc kubenswrapper[4975]: I0126 00:21:09.257947 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Jan 26 00:21:09 crc kubenswrapper[4975]: I0126 00:21:09.268500 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-v5rv7"] Jan 26 00:21:09 crc kubenswrapper[4975]: I0126 00:21:09.311459 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dnpp4\" (UniqueName: \"kubernetes.io/projected/e55de362-fd69-455f-a9fc-04c7f47abaae-kube-api-access-dnpp4\") pod \"cert-manager-operator-controller-manager-5446d6888b-v5rv7\" (UID: \"e55de362-fd69-455f-a9fc-04c7f47abaae\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-v5rv7" Jan 26 00:21:09 crc kubenswrapper[4975]: I0126 00:21:09.311542 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/e55de362-fd69-455f-a9fc-04c7f47abaae-tmp\") pod \"cert-manager-operator-controller-manager-5446d6888b-v5rv7\" (UID: \"e55de362-fd69-455f-a9fc-04c7f47abaae\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-v5rv7" Jan 26 00:21:09 crc kubenswrapper[4975]: I0126 00:21:09.413460 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dnpp4\" (UniqueName: \"kubernetes.io/projected/e55de362-fd69-455f-a9fc-04c7f47abaae-kube-api-access-dnpp4\") pod \"cert-manager-operator-controller-manager-5446d6888b-v5rv7\" (UID: \"e55de362-fd69-455f-a9fc-04c7f47abaae\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-v5rv7" Jan 26 00:21:09 crc kubenswrapper[4975]: I0126 00:21:09.413534 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/e55de362-fd69-455f-a9fc-04c7f47abaae-tmp\") pod \"cert-manager-operator-controller-manager-5446d6888b-v5rv7\" (UID: \"e55de362-fd69-455f-a9fc-04c7f47abaae\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-v5rv7" Jan 26 00:21:09 crc kubenswrapper[4975]: I0126 00:21:09.414238 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/e55de362-fd69-455f-a9fc-04c7f47abaae-tmp\") pod \"cert-manager-operator-controller-manager-5446d6888b-v5rv7\" (UID: \"e55de362-fd69-455f-a9fc-04c7f47abaae\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-v5rv7" Jan 26 00:21:09 crc kubenswrapper[4975]: I0126 00:21:09.458470 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dnpp4\" (UniqueName: \"kubernetes.io/projected/e55de362-fd69-455f-a9fc-04c7f47abaae-kube-api-access-dnpp4\") pod \"cert-manager-operator-controller-manager-5446d6888b-v5rv7\" (UID: \"e55de362-fd69-455f-a9fc-04c7f47abaae\") " 
pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-v5rv7" Jan 26 00:21:09 crc kubenswrapper[4975]: I0126 00:21:09.581264 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-v5rv7" Jan 26 00:21:14 crc kubenswrapper[4975]: E0126 00:21:14.526521 4975 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1724845fa86d62d9740b321f1ebe9c58d74dc9684b1587a46c8aa37a500e3887 is running failed: container process not found" containerID="1724845fa86d62d9740b321f1ebe9c58d74dc9684b1587a46c8aa37a500e3887" cmd=["grpc_health_probe","-addr=:50051"] Jan 26 00:21:14 crc kubenswrapper[4975]: E0126 00:21:14.529020 4975 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1724845fa86d62d9740b321f1ebe9c58d74dc9684b1587a46c8aa37a500e3887 is running failed: container process not found" containerID="1724845fa86d62d9740b321f1ebe9c58d74dc9684b1587a46c8aa37a500e3887" cmd=["grpc_health_probe","-addr=:50051"] Jan 26 00:21:14 crc kubenswrapper[4975]: E0126 00:21:14.529404 4975 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1724845fa86d62d9740b321f1ebe9c58d74dc9684b1587a46c8aa37a500e3887 is running failed: container process not found" containerID="1724845fa86d62d9740b321f1ebe9c58d74dc9684b1587a46c8aa37a500e3887" cmd=["grpc_health_probe","-addr=:50051"] Jan 26 00:21:14 crc kubenswrapper[4975]: E0126 00:21:14.529459 4975 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 1724845fa86d62d9740b321f1ebe9c58d74dc9684b1587a46c8aa37a500e3887 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-zbwfk" podUID="b6e399bb-0385-4ecc-9c50-1dfd2a316ad9" containerName="registry-server" Jan 26 00:21:16 crc kubenswrapper[4975]: E0126 00:21:16.388526 4975 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:e7e5f4c5e8ab0ba298ef0295a7137d438a42eb177d9322212cde6ba8f367912a" Jan 26 00:21:16 crc kubenswrapper[4975]: E0126 00:21:16.388929 4975 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:e7e5f4c5e8ab0ba298ef0295a7137d438a42eb177d9322212cde6ba8f367912a,Command:[],Args:[--prometheus-config-reloader=$(RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER) --prometheus-instance-selector=app.kubernetes.io/managed-by=observability-operator --alertmanager-instance-selector=app.kubernetes.io/managed-by=observability-operator --thanos-ruler-instance-selector=app.kubernetes.io/managed-by=observability-operator --watch-referenced-objects-in-all-namespaces=true 
--disable-unmanaged-prometheus-configuration=true],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:GOGC,Value:30,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_PROMETHEUS_CONFIG_RELOADER,Value:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-prometheus-config-reloader-rhel9@sha256:9a2097bc5b2e02bc1703f64c452ce8fe4bc6775b732db930ff4770b76ae4653a,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{157286400 0} {} 150Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6mxwf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-68bc856cb9-cps6q_openshift-operators(9523511a-d66c-42d3-86b2-5572b83ed21e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 00:21:16 crc kubenswrapper[4975]: E0126 00:21:16.390259 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-cps6q" podUID="9523511a-d66c-42d3-86b2-5572b83ed21e" Jan 26 00:21:17 crc kubenswrapper[4975]: E0126 00:21:17.064008 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-rhel9-operator@sha256:e7e5f4c5e8ab0ba298ef0295a7137d438a42eb177d9322212cde6ba8f367912a\\\"\"" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-cps6q" podUID="9523511a-d66c-42d3-86b2-5572b83ed21e" Jan 26 00:21:18 crc kubenswrapper[4975]: I0126 00:21:18.529808 4975 scope.go:117] "RemoveContainer" containerID="e6dcbabf2d17c43518f38f243e9bb28fd380197ce5b31236bb2d9aa3491816f4" Jan 26 00:21:18 crc kubenswrapper[4975]: E0126 00:21:18.538249 4975 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea" Jan 26 00:21:18 crc kubenswrapper[4975]: E0126 00:21:18.538502 4975 kuberuntime_manager.go:1274] 
"Unhandled Error" err="container &Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt --web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n_openshift-operators(f403cd64-91dd-45f8-b0b0-981505389e7a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 00:21:18 crc kubenswrapper[4975]: E0126 00:21:18.539625 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n" podUID="f403cd64-91dd-45f8-b0b0-981505389e7a" Jan 26 00:21:18 crc kubenswrapper[4975]: E0126 00:21:18.558790 4975 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea" Jan 26 00:21:18 crc kubenswrapper[4975]: E0126 00:21:18.558959 4975 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt 
--web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod obo-prometheus-operator-admission-webhook-59b48b699b-ltxgv_openshift-operators(f01df3f6-766e-45bc-b28b-5fbf18581625): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 00:21:18 crc kubenswrapper[4975]: E0126 00:21:18.561422 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-ltxgv" podUID="f01df3f6-766e-45bc-b28b-5fbf18581625" Jan 26 00:21:18 crc kubenswrapper[4975]: I0126 00:21:18.607947 4975 scope.go:117] "RemoveContainer" containerID="282a38acdd3d60ff8120c0df11f3ed478db217526e12e89258840bb75c656a8a" Jan 26 00:21:18 crc kubenswrapper[4975]: E0126 00:21:18.616065 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"282a38acdd3d60ff8120c0df11f3ed478db217526e12e89258840bb75c656a8a\": container with ID starting with 282a38acdd3d60ff8120c0df11f3ed478db217526e12e89258840bb75c656a8a not found: ID does not exist" containerID="282a38acdd3d60ff8120c0df11f3ed478db217526e12e89258840bb75c656a8a" Jan 26 00:21:18 crc kubenswrapper[4975]: I0126 00:21:18.616094 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"282a38acdd3d60ff8120c0df11f3ed478db217526e12e89258840bb75c656a8a"} err="failed to get container status \"282a38acdd3d60ff8120c0df11f3ed478db217526e12e89258840bb75c656a8a\": rpc error: code = NotFound desc = could not find container \"282a38acdd3d60ff8120c0df11f3ed478db217526e12e89258840bb75c656a8a\": container with ID starting with 282a38acdd3d60ff8120c0df11f3ed478db217526e12e89258840bb75c656a8a not found: ID does not exist" Jan 26 00:21:18 crc kubenswrapper[4975]: I0126 
00:21:18.616117 4975 scope.go:117] "RemoveContainer" containerID="6905210e8785fa468a98c8a63e19db583d0b10ad6e741b19d4f39c8f7b2804ca" Jan 26 00:21:18 crc kubenswrapper[4975]: E0126 00:21:18.616340 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6905210e8785fa468a98c8a63e19db583d0b10ad6e741b19d4f39c8f7b2804ca\": container with ID starting with 6905210e8785fa468a98c8a63e19db583d0b10ad6e741b19d4f39c8f7b2804ca not found: ID does not exist" containerID="6905210e8785fa468a98c8a63e19db583d0b10ad6e741b19d4f39c8f7b2804ca" Jan 26 00:21:18 crc kubenswrapper[4975]: I0126 00:21:18.616355 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6905210e8785fa468a98c8a63e19db583d0b10ad6e741b19d4f39c8f7b2804ca"} err="failed to get container status \"6905210e8785fa468a98c8a63e19db583d0b10ad6e741b19d4f39c8f7b2804ca\": rpc error: code = NotFound desc = could not find container \"6905210e8785fa468a98c8a63e19db583d0b10ad6e741b19d4f39c8f7b2804ca\": container with ID starting with 6905210e8785fa468a98c8a63e19db583d0b10ad6e741b19d4f39c8f7b2804ca not found: ID does not exist" Jan 26 00:21:18 crc kubenswrapper[4975]: I0126 00:21:18.616367 4975 scope.go:117] "RemoveContainer" containerID="e6dcbabf2d17c43518f38f243e9bb28fd380197ce5b31236bb2d9aa3491816f4" Jan 26 00:21:18 crc kubenswrapper[4975]: E0126 00:21:18.617055 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6dcbabf2d17c43518f38f243e9bb28fd380197ce5b31236bb2d9aa3491816f4\": container with ID starting with e6dcbabf2d17c43518f38f243e9bb28fd380197ce5b31236bb2d9aa3491816f4 not found: ID does not exist" containerID="e6dcbabf2d17c43518f38f243e9bb28fd380197ce5b31236bb2d9aa3491816f4" Jan 26 00:21:18 crc kubenswrapper[4975]: I0126 00:21:18.617074 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6dcbabf2d17c43518f38f243e9bb28fd380197ce5b31236bb2d9aa3491816f4"} err="failed to get container status \"e6dcbabf2d17c43518f38f243e9bb28fd380197ce5b31236bb2d9aa3491816f4\": rpc error: code = NotFound desc = could not find container \"e6dcbabf2d17c43518f38f243e9bb28fd380197ce5b31236bb2d9aa3491816f4\": container with ID starting with e6dcbabf2d17c43518f38f243e9bb28fd380197ce5b31236bb2d9aa3491816f4 not found: ID does not exist" Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.108905 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zbwfk" Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.200723 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-v5rv7"] Jan 26 00:21:19 crc kubenswrapper[4975]: W0126 00:21:19.211939 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode55de362_fd69_455f_a9fc_04c7f47abaae.slice/crio-1d7433b61e55bee35e531040f3ba6c19b8e9dc5aded83da0a50ca9df2f5c393c WatchSource:0}: Error finding container 1d7433b61e55bee35e531040f3ba6c19b8e9dc5aded83da0a50ca9df2f5c393c: Status 404 returned error can't find the container with id 1d7433b61e55bee35e531040f3ba6c19b8e9dc5aded83da0a50ca9df2f5c393c Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.217469 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6e399bb-0385-4ecc-9c50-1dfd2a316ad9-catalog-content\") pod \"b6e399bb-0385-4ecc-9c50-1dfd2a316ad9\" (UID: \"b6e399bb-0385-4ecc-9c50-1dfd2a316ad9\") " Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.217629 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6e399bb-0385-4ecc-9c50-1dfd2a316ad9-utilities\") pod \"b6e399bb-0385-4ecc-9c50-1dfd2a316ad9\" (UID: \"b6e399bb-0385-4ecc-9c50-1dfd2a316ad9\") " Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.217705 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-phg5f\" (UniqueName: \"kubernetes.io/projected/b6e399bb-0385-4ecc-9c50-1dfd2a316ad9-kube-api-access-phg5f\") pod \"b6e399bb-0385-4ecc-9c50-1dfd2a316ad9\" (UID: \"b6e399bb-0385-4ecc-9c50-1dfd2a316ad9\") " Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.218422 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6e399bb-0385-4ecc-9c50-1dfd2a316ad9-utilities" (OuterVolumeSpecName: "utilities") pod "b6e399bb-0385-4ecc-9c50-1dfd2a316ad9" (UID: "b6e399bb-0385-4ecc-9c50-1dfd2a316ad9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.225915 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6e399bb-0385-4ecc-9c50-1dfd2a316ad9-kube-api-access-phg5f" (OuterVolumeSpecName: "kube-api-access-phg5f") pod "b6e399bb-0385-4ecc-9c50-1dfd2a316ad9" (UID: "b6e399bb-0385-4ecc-9c50-1dfd2a316ad9"). InnerVolumeSpecName "kube-api-access-phg5f". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.260635 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6e399bb-0385-4ecc-9c50-1dfd2a316ad9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b6e399bb-0385-4ecc-9c50-1dfd2a316ad9" (UID: "b6e399bb-0385-4ecc-9c50-1dfd2a316ad9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.319024 4975 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6e399bb-0385-4ecc-9c50-1dfd2a316ad9-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.319064 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-phg5f\" (UniqueName: \"kubernetes.io/projected/b6e399bb-0385-4ecc-9c50-1dfd2a316ad9-kube-api-access-phg5f\") on node \"crc\" DevicePath \"\"" Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.319075 4975 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6e399bb-0385-4ecc-9c50-1dfd2a316ad9-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.322643 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbwfk" event={"ID":"b6e399bb-0385-4ecc-9c50-1dfd2a316ad9","Type":"ContainerDied","Data":"31ccc4c8d76387ba109e25a895f7f8cd75b61326524a3adbb1ba3f3d99378945"} Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.322694 4975 scope.go:117] "RemoveContainer" containerID="1724845fa86d62d9740b321f1ebe9c58d74dc9684b1587a46c8aa37a500e3887" Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.322657 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zbwfk" Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.333903 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-gbqn9" event={"ID":"47b2b3cb-f050-438c-b2d4-2ed6b594fad9","Type":"ContainerStarted","Data":"58ab4e48589995cfe99e09fac70f3fb4d064e37ebe3a3488bc9f2e9035cdf340"} Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.335123 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-59bdc8b94-gbqn9" Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.336979 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-59bdc8b94-gbqn9" Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.338019 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-6tfl5" event={"ID":"93742037-1757-4c1c-b40d-c1e6065bdf8c","Type":"ContainerStarted","Data":"6b4ef5b36596475ab97c09ce3abe8f15854fb5d0660f27a979ce27a345aa0815"} Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.338788 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5bf474d74f-6tfl5" Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.344030 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-v5rv7" event={"ID":"e55de362-fd69-455f-a9fc-04c7f47abaae","Type":"ContainerStarted","Data":"1d7433b61e55bee35e531040f3ba6c19b8e9dc5aded83da0a50ca9df2f5c393c"} Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.348214 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elastic-operator-86c87b945b-tv582" event={"ID":"7994d811-7481-4dc1-bb32-0d8cb5cde38f","Type":"ContainerStarted","Data":"4101a09249f5308a74ed2a6c781cd251f04d7ec6736337829ba3794b46711350"} Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 
00:21:19.348629 4975 scope.go:117] "RemoveContainer" containerID="dd627221ca0d35c607262885b60d5bc95b7e3461257c7305a5016d0a944fa1ac" Jan 26 00:21:19 crc kubenswrapper[4975]: E0126 00:21:19.348691 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea\\\"\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-ltxgv" podUID="f01df3f6-766e-45bc-b28b-5fbf18581625" Jan 26 00:21:19 crc kubenswrapper[4975]: E0126 00:21:19.349217 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea\\\"\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n" podUID="f403cd64-91dd-45f8-b0b0-981505389e7a" Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.377704 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-59bdc8b94-gbqn9" podStartSLOduration=1.7679773380000001 podStartE2EDuration="22.377681441s" podCreationTimestamp="2026-01-26 00:20:57 +0000 UTC" firstStartedPulling="2026-01-26 00:20:57.998289058 +0000 UTC m=+842.119494552" lastFinishedPulling="2026-01-26 00:21:18.607993171 +0000 UTC m=+862.729198655" observedRunningTime="2026-01-26 00:21:19.374253764 +0000 UTC m=+863.495459258" watchObservedRunningTime="2026-01-26 00:21:19.377681441 +0000 UTC m=+863.498886935" Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.387040 4975 scope.go:117] "RemoveContainer" containerID="5a9217fa7470a017dd7d7193962fccb7b961794d1ea43544c86fc5bcd314e8bb" Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.436248 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5bf474d74f-6tfl5" podStartSLOduration=2.107735503 podStartE2EDuration="22.436226344s" podCreationTimestamp="2026-01-26 00:20:57 +0000 UTC" firstStartedPulling="2026-01-26 00:20:58.280856448 +0000 UTC m=+842.402061942" lastFinishedPulling="2026-01-26 00:21:18.609347289 +0000 UTC m=+862.730552783" observedRunningTime="2026-01-26 00:21:19.429496525 +0000 UTC m=+863.550702019" watchObservedRunningTime="2026-01-26 00:21:19.436226344 +0000 UTC m=+863.557431838" Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.454360 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zbwfk"] Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.464116 4975 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zbwfk"] Jan 26 00:21:19 crc kubenswrapper[4975]: I0126 00:21:19.499573 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/elastic-operator-86c87b945b-tv582" podStartSLOduration=2.740873563 podStartE2EDuration="25.499556761s" podCreationTimestamp="2026-01-26 00:20:54 +0000 UTC" firstStartedPulling="2026-01-26 00:20:55.764434141 +0000 UTC m=+839.885639635" lastFinishedPulling="2026-01-26 00:21:18.523117339 +0000 UTC m=+862.644322833" 
observedRunningTime="2026-01-26 00:21:19.494656903 +0000 UTC m=+863.615862407" watchObservedRunningTime="2026-01-26 00:21:19.499556761 +0000 UTC m=+863.620762255" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.155226 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6e399bb-0385-4ecc-9c50-1dfd2a316ad9" path="/var/lib/kubelet/pods/b6e399bb-0385-4ecc-9c50-1dfd2a316ad9/volumes" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.281197 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Jan 26 00:21:20 crc kubenswrapper[4975]: E0126 00:21:20.281772 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6e399bb-0385-4ecc-9c50-1dfd2a316ad9" containerName="extract-utilities" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.281807 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6e399bb-0385-4ecc-9c50-1dfd2a316ad9" containerName="extract-utilities" Jan 26 00:21:20 crc kubenswrapper[4975]: E0126 00:21:20.281822 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6e399bb-0385-4ecc-9c50-1dfd2a316ad9" containerName="extract-content" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.281830 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6e399bb-0385-4ecc-9c50-1dfd2a316ad9" containerName="extract-content" Jan 26 00:21:20 crc kubenswrapper[4975]: E0126 00:21:20.281844 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6e399bb-0385-4ecc-9c50-1dfd2a316ad9" containerName="registry-server" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.281852 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6e399bb-0385-4ecc-9c50-1dfd2a316ad9" containerName="registry-server" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.281967 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6e399bb-0385-4ecc-9c50-1dfd2a316ad9" containerName="registry-server" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.282855 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.286690 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"elasticsearch-es-scripts" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.286929 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-default-es-transport-certs" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.287107 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-http-certs-internal" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.287337 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-internal-users" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.287584 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"elasticsearch-es-unicast-hosts" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.287844 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-dockercfg-ttgn2" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.287885 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-remote-ca" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.288018 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-xpack-file-realm" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.288506 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-default-es-config" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.332578 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.332630 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.332670 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.332694 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " 
pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.332717 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.332785 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/9ca3c33e-c168-4aec-b194-821f5b3f3995-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.332833 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/9ca3c33e-c168-4aec-b194-821f5b3f3995-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.332860 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.332893 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.332915 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/9ca3c33e-c168-4aec-b194-821f5b3f3995-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.332935 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.332968 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " 
pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.333008 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.333034 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/9ca3c33e-c168-4aec-b194-821f5b3f3995-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.333076 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.371749 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.434798 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.434914 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.434950 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.434984 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.435020 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: 
\"kubernetes.io/empty-dir/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.435041 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.435074 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/9ca3c33e-c168-4aec-b194-821f5b3f3995-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.435117 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/9ca3c33e-c168-4aec-b194-821f5b3f3995-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.435143 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.435173 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.435198 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/9ca3c33e-c168-4aec-b194-821f5b3f3995-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.435221 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.435249 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: 
\"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.435301 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.435325 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/9ca3c33e-c168-4aec-b194-821f5b3f3995-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.435521 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.435983 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/9ca3c33e-c168-4aec-b194-821f5b3f3995-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.436395 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.436451 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/9ca3c33e-c168-4aec-b194-821f5b3f3995-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.437600 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.437823 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.438191 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.439943 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.440061 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.440338 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.440754 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/9ca3c33e-c168-4aec-b194-821f5b3f3995-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.442514 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/9ca3c33e-c168-4aec-b194-821f5b3f3995-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.449703 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.460267 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.460948 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/9ca3c33e-c168-4aec-b194-821f5b3f3995-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: 
\"9ca3c33e-c168-4aec-b194-821f5b3f3995\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:20 crc kubenswrapper[4975]: I0126 00:21:20.600302 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:21:21 crc kubenswrapper[4975]: I0126 00:21:21.042816 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Jan 26 00:21:21 crc kubenswrapper[4975]: I0126 00:21:21.376907 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"9ca3c33e-c168-4aec-b194-821f5b3f3995","Type":"ContainerStarted","Data":"d8512f0e03ada497a1da3a418d7ca4ad947c3659677f996157acc70fa7a8eac1"} Jan 26 00:21:27 crc kubenswrapper[4975]: I0126 00:21:27.832173 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5bf474d74f-6tfl5" Jan 26 00:21:34 crc kubenswrapper[4975]: E0126 00:21:34.785270 4975 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cert-manager/cert-manager-operator-rhel9@sha256:fa8de363ab4435c1085ac37f1bad488828c6ae8ba361c5f865c27ef577610911" Jan 26 00:21:34 crc kubenswrapper[4975]: E0126 00:21:34.785996 4975 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cert-manager-operator,Image:registry.redhat.io/cert-manager/cert-manager-operator-rhel9@sha256:fa8de363ab4435c1085ac37f1bad488828c6ae8ba361c5f865c27ef577610911,Command:[/usr/bin/cert-manager-operator],Args:[start --v=$(OPERATOR_LOG_LEVEL) --trusted-ca-configmap=$(TRUSTED_CA_CONFIGMAP_NAME) --cloud-credentials-secret=$(CLOUD_CREDENTIALS_SECRET_NAME) 
--unsupported-addon-features=$(UNSUPPORTED_ADDON_FEATURES)],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:WATCH_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.annotations['olm.targetNamespaces'],},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:OPERATOR_NAME,Value:cert-manager-operator,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CERT_MANAGER_WEBHOOK,Value:registry.redhat.io/cert-manager/jetstack-cert-manager-rhel9@sha256:29a0fa1c2f2a6cee62a0468a3883d16d491b4af29130dad6e3e2bb2948f274df,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CERT_MANAGER_CA_INJECTOR,Value:registry.redhat.io/cert-manager/jetstack-cert-manager-rhel9@sha256:29a0fa1c2f2a6cee62a0468a3883d16d491b4af29130dad6e3e2bb2948f274df,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CERT_MANAGER_CONTROLLER,Value:registry.redhat.io/cert-manager/jetstack-cert-manager-rhel9@sha256:29a0fa1c2f2a6cee62a0468a3883d16d491b4af29130dad6e3e2bb2948f274df,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CERT_MANAGER_ACMESOLVER,Value:registry.redhat.io/cert-manager/jetstack-cert-manager-acmesolver-rhel9@sha256:ba937fc4b9eee31422914352c11a45b90754ba4fbe490ea45249b90afdc4e0a7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CERT_MANAGER_ISTIOCSR,Value:registry.redhat.io/cert-manager/cert-manager-istio-csr-rhel9@sha256:af1ac813b8ee414ef215936f05197bc498bccbd540f3e2a93cb522221ba112bc,ValueFrom:nil,},EnvVar{Name:OPERAND_IMAGE_VERSION,Value:1.18.3,ValueFrom:nil,},EnvVar{Name:ISTIOCSR_OPERAND_IMAGE_VERSION,Value:0.14.2,ValueFrom:nil,},EnvVar{Name:OPERATOR_IMAGE_VERSION,Value:1.18.0,ValueFrom:nil,},EnvVar{Name:OPERATOR_LOG_LEVEL,Value:2,ValueFrom:nil,},EnvVar{Name:TRUSTED_CA_CONFIGMAP_NAME,Value:,ValueFrom:nil,},EnvVar{Name:CLOUD_CREDENTIALS_SECRET_NAME,Value:,ValueFrom:nil,},EnvVar{Name:UNSUPPORTED_ADDON_FEATURES,Value:,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cert-manager-operator.v1.18.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{33554432 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:tmp,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dnpp4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:*false,SELinuxOptions:nil,RunAsUser:*1000680000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cert-manager-operator-controller-manager-5446d6888b-v5rv7_cert-manager-operator(e55de362-fd69-455f-a9fc-04c7f47abaae): 
ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 00:21:34 crc kubenswrapper[4975]: E0126 00:21:34.788490 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cert-manager-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-v5rv7" podUID="e55de362-fd69-455f-a9fc-04c7f47abaae" Jan 26 00:21:35 crc kubenswrapper[4975]: E0126 00:21:35.511595 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cert-manager-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cert-manager/cert-manager-operator-rhel9@sha256:fa8de363ab4435c1085ac37f1bad488828c6ae8ba361c5f865c27ef577610911\\\"\"" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-v5rv7" podUID="e55de362-fd69-455f-a9fc-04c7f47abaae" Jan 26 00:21:48 crc kubenswrapper[4975]: E0126 00:21:48.487936 4975 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea" Jan 26 00:21:48 crc kubenswrapper[4975]: E0126 00:21:48.488753 4975 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus-operator-admission-webhook,Image:registry.redhat.io/cluster-observability-operator/obo-prometheus-operator-admission-webhook-rhel9@sha256:42ebc3571195d8c41fd01b8d08e98fe2cc12c1caabea251aecb4442d8eade4ea,Command:[],Args:[--web.enable-tls=true --web.cert-file=/tmp/k8s-webhook-server/serving-certs/tls.crt --web.key-file=/tmp/k8s-webhook-server/serving-certs/tls.key],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cluster-observability-operator.v1.3.1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{209715200 0} {} BinarySI},},Requests:ResourceList{cpu: {{50 -3} {} 50m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:apiservice-cert,ReadOnly:false,MountPath:/apiserver.local.config/certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:webhook-cert,ReadOnly:false,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n_openshift-operators(f403cd64-91dd-45f8-b0b0-981505389e7a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 00:21:48 crc kubenswrapper[4975]: E0126 00:21:48.490099 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-operator-admission-webhook\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n" podUID="f403cd64-91dd-45f8-b0b0-981505389e7a" Jan 26 00:21:49 crc kubenswrapper[4975]: E0126 00:21:49.096922 4975 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="registry.connect.redhat.com/elastic/elasticsearch:7.17.20" Jan 26 00:21:49 crc kubenswrapper[4975]: E0126 00:21:49.097758 4975 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:elastic-internal-init-filesystem,Image:registry.connect.redhat.com/elastic/elasticsearch:7.17.20,Command:[bash -c /mnt/elastic-internal/scripts/prepare-fs.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:NODE_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:spec.nodeName,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:HEADLESS_SERVICE_NAME,Value:elasticsearch-es-default,ValueFrom:nil,},EnvVar{Name:PROBE_PASSWORD_PATH,Value:/mnt/elastic-internal/pod-mounted-users/elastic-internal-probe,ValueFrom:nil,},EnvVar{Name:PROBE_USERNAME,Value:elastic-internal-probe,ValueFrom:nil,},EnvVar{Name:READINESS_PROBE_PROTOCOL,Value:https,ValueFrom:nil,},EnvVar{Name:NSS_SDB_USE_CACHE,Value:no,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{52428800 0} {} 50Mi 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:downward-api,ReadOnly:true,MountPath:/mnt/elastic-internal/downward-api,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-elasticsearch-bin-local,ReadOnly:false,MountPath:/mnt/elastic-internal/elasticsearch-bin-local,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-elasticsearch-config,ReadOnly:true,MountPath:/mnt/elastic-internal/elasticsearch-config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-elasticsearch-config-local,ReadOnly:false,MountPath:/mnt/elastic-internal/elasticsearch-config-local,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-elasticsearch-plugins-local,ReadOnly:false,MountPath:/mnt/elastic-internal/elasticsearch-plugins-local,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-http-certificates,ReadOnly:true,MountPath:/usr/share/elasticsearch/config/http-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-probe-user,ReadOnly:true,MountPath:/mnt/elastic-internal/pod-mounted-users,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-remote-certificate-authorities,ReadOnly:true,MountPath:/usr/share/elasticsearch/config/transport-remote-certs/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-scripts,ReadOnly:true,MountPath:/mnt/elastic-internal/scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-transport-certificates,ReadOnly:true,MountPath:/mnt/elastic-internal/transport-certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-unicast-hosts,ReadOnly:true,MountPath:/mnt/elastic-internal/unicast-hosts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-xpack-file-realm,ReadOnly:true,MountPath:/mnt/elastic-internal/xpack-file-realm,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elasticsearch-data,ReadOnly:false,MountPath:/usr/share/elasticsearch/data,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elasticsearch-logs,ReadOnly:false,MountPath:/usr/share/elasticsearch/logs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:tmp-volume,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:*false,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod elasticsearch-es-default-0_service-telemetry(9ca3c33e-c168-4aec-b194-821f5b3f3995): ErrImagePull: rpc error: code = Canceled desc = copying config: context 
canceled" logger="UnhandledError" Jan 26 00:21:49 crc kubenswrapper[4975]: E0126 00:21:49.099945 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"elastic-internal-init-filesystem\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/elasticsearch-es-default-0" podUID="9ca3c33e-c168-4aec-b194-821f5b3f3995" Jan 26 00:21:49 crc kubenswrapper[4975]: I0126 00:21:49.828992 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-v5rv7" event={"ID":"e55de362-fd69-455f-a9fc-04c7f47abaae","Type":"ContainerStarted","Data":"c5ba67d9fde55c0ccce0f2ce49f2883ac641141473283a648ef067bba6ec00f8"} Jan 26 00:21:49 crc kubenswrapper[4975]: I0126 00:21:49.831115 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-cps6q" event={"ID":"9523511a-d66c-42d3-86b2-5572b83ed21e","Type":"ContainerStarted","Data":"45f610ef8a3088eb739dc95aea867d34d5bb39b42d4aa070d8433e2eb987dc73"} Jan 26 00:21:49 crc kubenswrapper[4975]: I0126 00:21:49.833132 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-ltxgv" event={"ID":"f01df3f6-766e-45bc-b28b-5fbf18581625","Type":"ContainerStarted","Data":"257bbded0aba6c2d299efcd0985113cac7933bd1f35a9bd63373bd905b185cf6"} Jan 26 00:21:49 crc kubenswrapper[4975]: E0126 00:21:49.834354 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"elastic-internal-init-filesystem\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/elasticsearch:7.17.20\\\"\"" pod="service-telemetry/elasticsearch-es-default-0" podUID="9ca3c33e-c168-4aec-b194-821f5b3f3995" Jan 26 00:21:49 crc kubenswrapper[4975]: I0126 00:21:49.853062 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-v5rv7" podStartSLOduration=10.84485661 podStartE2EDuration="40.853044632s" podCreationTimestamp="2026-01-26 00:21:09 +0000 UTC" firstStartedPulling="2026-01-26 00:21:19.215504969 +0000 UTC m=+863.336710453" lastFinishedPulling="2026-01-26 00:21:49.223692981 +0000 UTC m=+893.344898475" observedRunningTime="2026-01-26 00:21:49.850371427 +0000 UTC m=+893.971576921" watchObservedRunningTime="2026-01-26 00:21:49.853044632 +0000 UTC m=+893.974250126" Jan 26 00:21:49 crc kubenswrapper[4975]: I0126 00:21:49.914157 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-ltxgv" podStartSLOduration=3.016010559 podStartE2EDuration="53.914141247s" podCreationTimestamp="2026-01-26 00:20:56 +0000 UTC" firstStartedPulling="2026-01-26 00:20:58.080236618 +0000 UTC m=+842.201442112" lastFinishedPulling="2026-01-26 00:21:48.978367306 +0000 UTC m=+893.099572800" observedRunningTime="2026-01-26 00:21:49.909370593 +0000 UTC m=+894.030576087" watchObservedRunningTime="2026-01-26 00:21:49.914141247 +0000 UTC m=+894.035346741" Jan 26 00:21:49 crc kubenswrapper[4975]: I0126 00:21:49.943921 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-cps6q" podStartSLOduration=2.7595635720000002 podStartE2EDuration="53.943903852s" podCreationTimestamp="2026-01-26 00:20:56 +0000 UTC" 
firstStartedPulling="2026-01-26 00:20:57.547990962 +0000 UTC m=+841.669196456" lastFinishedPulling="2026-01-26 00:21:48.732331242 +0000 UTC m=+892.853536736" observedRunningTime="2026-01-26 00:21:49.937115312 +0000 UTC m=+894.058320806" watchObservedRunningTime="2026-01-26 00:21:49.943903852 +0000 UTC m=+894.065109346" Jan 26 00:21:50 crc kubenswrapper[4975]: I0126 00:21:50.653541 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Jan 26 00:21:50 crc kubenswrapper[4975]: I0126 00:21:50.698324 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Jan 26 00:21:50 crc kubenswrapper[4975]: E0126 00:21:50.841347 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"elastic-internal-init-filesystem\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/elasticsearch:7.17.20\\\"\"" pod="service-telemetry/elasticsearch-es-default-0" podUID="9ca3c33e-c168-4aec-b194-821f5b3f3995" Jan 26 00:21:51 crc kubenswrapper[4975]: E0126 00:21:51.846303 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"elastic-internal-init-filesystem\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/elasticsearch:7.17.20\\\"\"" pod="service-telemetry/elasticsearch-es-default-0" podUID="9ca3c33e-c168-4aec-b194-821f5b3f3995" Jan 26 00:21:53 crc kubenswrapper[4975]: I0126 00:21:53.619871 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-4lbtg"] Jan 26 00:21:53 crc kubenswrapper[4975]: I0126 00:21:53.621247 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-4lbtg" Jan 26 00:21:53 crc kubenswrapper[4975]: I0126 00:21:53.623876 4975 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-4tzqk" Jan 26 00:21:53 crc kubenswrapper[4975]: I0126 00:21:53.624115 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Jan 26 00:21:53 crc kubenswrapper[4975]: I0126 00:21:53.624890 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Jan 26 00:21:53 crc kubenswrapper[4975]: I0126 00:21:53.628841 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-4lbtg"] Jan 26 00:21:53 crc kubenswrapper[4975]: I0126 00:21:53.680309 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4n6zv\" (UniqueName: \"kubernetes.io/projected/a9fa8b62-74b2-4068-b94e-1968a498a379-kube-api-access-4n6zv\") pod \"cert-manager-webhook-f4fb5df64-4lbtg\" (UID: \"a9fa8b62-74b2-4068-b94e-1968a498a379\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-4lbtg" Jan 26 00:21:53 crc kubenswrapper[4975]: I0126 00:21:53.680416 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a9fa8b62-74b2-4068-b94e-1968a498a379-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-4lbtg\" (UID: \"a9fa8b62-74b2-4068-b94e-1968a498a379\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-4lbtg" Jan 26 00:21:53 crc kubenswrapper[4975]: I0126 00:21:53.782287 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a9fa8b62-74b2-4068-b94e-1968a498a379-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-4lbtg\" (UID: \"a9fa8b62-74b2-4068-b94e-1968a498a379\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-4lbtg" Jan 26 00:21:53 crc kubenswrapper[4975]: I0126 00:21:53.782392 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4n6zv\" (UniqueName: \"kubernetes.io/projected/a9fa8b62-74b2-4068-b94e-1968a498a379-kube-api-access-4n6zv\") pod \"cert-manager-webhook-f4fb5df64-4lbtg\" (UID: \"a9fa8b62-74b2-4068-b94e-1968a498a379\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-4lbtg" Jan 26 00:21:53 crc kubenswrapper[4975]: I0126 00:21:53.810464 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a9fa8b62-74b2-4068-b94e-1968a498a379-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-4lbtg\" (UID: \"a9fa8b62-74b2-4068-b94e-1968a498a379\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-4lbtg" Jan 26 00:21:53 crc kubenswrapper[4975]: I0126 00:21:53.817117 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4n6zv\" (UniqueName: \"kubernetes.io/projected/a9fa8b62-74b2-4068-b94e-1968a498a379-kube-api-access-4n6zv\") pod \"cert-manager-webhook-f4fb5df64-4lbtg\" (UID: \"a9fa8b62-74b2-4068-b94e-1968a498a379\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-4lbtg" Jan 26 00:21:53 crc kubenswrapper[4975]: I0126 00:21:53.991774 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-4lbtg" Jan 26 00:21:54 crc kubenswrapper[4975]: I0126 00:21:54.384407 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-4lbtg"] Jan 26 00:21:54 crc kubenswrapper[4975]: I0126 00:21:54.866963 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-4lbtg" event={"ID":"a9fa8b62-74b2-4068-b94e-1968a498a379","Type":"ContainerStarted","Data":"ddac34a242adc300422e8a3f097a919faf17cbb48b3cf24d0329e30713be8f22"} Jan 26 00:21:55 crc kubenswrapper[4975]: I0126 00:21:55.618034 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-x2vvk"] Jan 26 00:21:55 crc kubenswrapper[4975]: I0126 00:21:55.618909 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-x2vvk" Jan 26 00:21:55 crc kubenswrapper[4975]: I0126 00:21:55.621218 4975 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-45khl" Jan 26 00:21:55 crc kubenswrapper[4975]: I0126 00:21:55.632019 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-x2vvk"] Jan 26 00:21:55 crc kubenswrapper[4975]: I0126 00:21:55.714989 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/44e30ae9-4d62-4cdd-92c9-5e20b05bec04-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-x2vvk\" (UID: \"44e30ae9-4d62-4cdd-92c9-5e20b05bec04\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-x2vvk" Jan 26 00:21:55 crc kubenswrapper[4975]: I0126 00:21:55.715143 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcqtt\" (UniqueName: \"kubernetes.io/projected/44e30ae9-4d62-4cdd-92c9-5e20b05bec04-kube-api-access-qcqtt\") pod \"cert-manager-cainjector-855d9ccff4-x2vvk\" (UID: \"44e30ae9-4d62-4cdd-92c9-5e20b05bec04\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-x2vvk" Jan 26 00:21:55 crc kubenswrapper[4975]: I0126 00:21:55.816342 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcqtt\" (UniqueName: \"kubernetes.io/projected/44e30ae9-4d62-4cdd-92c9-5e20b05bec04-kube-api-access-qcqtt\") pod \"cert-manager-cainjector-855d9ccff4-x2vvk\" (UID: \"44e30ae9-4d62-4cdd-92c9-5e20b05bec04\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-x2vvk" Jan 26 00:21:55 crc kubenswrapper[4975]: I0126 00:21:55.816399 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/44e30ae9-4d62-4cdd-92c9-5e20b05bec04-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-x2vvk\" (UID: \"44e30ae9-4d62-4cdd-92c9-5e20b05bec04\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-x2vvk" Jan 26 00:21:55 crc kubenswrapper[4975]: I0126 00:21:55.866452 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcqtt\" (UniqueName: \"kubernetes.io/projected/44e30ae9-4d62-4cdd-92c9-5e20b05bec04-kube-api-access-qcqtt\") pod \"cert-manager-cainjector-855d9ccff4-x2vvk\" (UID: \"44e30ae9-4d62-4cdd-92c9-5e20b05bec04\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-x2vvk" Jan 26 00:21:55 crc kubenswrapper[4975]: I0126 00:21:55.866500 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/44e30ae9-4d62-4cdd-92c9-5e20b05bec04-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-x2vvk\" (UID: \"44e30ae9-4d62-4cdd-92c9-5e20b05bec04\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-x2vvk" Jan 26 00:21:55 crc kubenswrapper[4975]: I0126 00:21:55.940153 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-x2vvk" Jan 26 00:21:56 crc kubenswrapper[4975]: I0126 00:21:56.705976 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-x2vvk"] Jan 26 00:21:56 crc kubenswrapper[4975]: W0126 00:21:56.721534 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod44e30ae9_4d62_4cdd_92c9_5e20b05bec04.slice/crio-0668fa5f15361871a654a24f1e02e9f5d3ab57264b388231324a17e79b17ec35 WatchSource:0}: Error finding container 0668fa5f15361871a654a24f1e02e9f5d3ab57264b388231324a17e79b17ec35: Status 404 returned error can't find the container with id 0668fa5f15361871a654a24f1e02e9f5d3ab57264b388231324a17e79b17ec35 Jan 26 00:21:56 crc kubenswrapper[4975]: I0126 00:21:56.891157 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-x2vvk" event={"ID":"44e30ae9-4d62-4cdd-92c9-5e20b05bec04","Type":"ContainerStarted","Data":"0668fa5f15361871a654a24f1e02e9f5d3ab57264b388231324a17e79b17ec35"} Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.505329 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-framework-index-1-build"] Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.507487 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.509680 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"service-telemetry-framework-index-dockercfg" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.510140 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-framework-index-1-global-ca" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.510322 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-framework-index-1-sys-config" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.510461 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-framework-index-1-ca" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.513228 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"builder-dockercfg-svncm" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.542038 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-index-1-build"] Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.600873 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/0a48a90e-a1cb-425f-9260-b40773491d61-node-pullsecrets\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.600927 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/0a48a90e-a1cb-425f-9260-b40773491d61-container-storage-run\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " 
pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.600951 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-svncm-push\" (UniqueName: \"kubernetes.io/secret/0a48a90e-a1cb-425f-9260-b40773491d61-builder-dockercfg-svncm-push\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.601113 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/0a48a90e-a1cb-425f-9260-b40773491d61-build-blob-cache\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.601202 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/0a48a90e-a1cb-425f-9260-b40773491d61-buildcachedir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.601274 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/0a48a90e-a1cb-425f-9260-b40773491d61-build-system-configs\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.601320 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"builder-dockercfg-svncm-pull\" (UniqueName: \"kubernetes.io/secret/0a48a90e-a1cb-425f-9260-b40773491d61-builder-dockercfg-svncm-pull\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.601399 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/0a48a90e-a1cb-425f-9260-b40773491d61-service-telemetry-framework-index-dockercfg-user-build-volume\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.601460 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0a48a90e-a1cb-425f-9260-b40773491d61-build-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.601492 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44g6x\" (UniqueName: 
\"kubernetes.io/projected/0a48a90e-a1cb-425f-9260-b40773491d61-kube-api-access-44g6x\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.601536 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/0a48a90e-a1cb-425f-9260-b40773491d61-buildworkdir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.601577 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0a48a90e-a1cb-425f-9260-b40773491d61-build-proxy-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.601680 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/0a48a90e-a1cb-425f-9260-b40773491d61-container-storage-root\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.703601 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/0a48a90e-a1cb-425f-9260-b40773491d61-buildworkdir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.703773 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0a48a90e-a1cb-425f-9260-b40773491d61-build-proxy-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.703977 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/0a48a90e-a1cb-425f-9260-b40773491d61-container-storage-root\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.704337 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/0a48a90e-a1cb-425f-9260-b40773491d61-node-pullsecrets\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.704380 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildworkdir\" (UniqueName: 
\"kubernetes.io/empty-dir/0a48a90e-a1cb-425f-9260-b40773491d61-buildworkdir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.704441 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/0a48a90e-a1cb-425f-9260-b40773491d61-container-storage-run\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.704491 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-svncm-push\" (UniqueName: \"kubernetes.io/secret/0a48a90e-a1cb-425f-9260-b40773491d61-builder-dockercfg-svncm-push\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.704577 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/0a48a90e-a1cb-425f-9260-b40773491d61-build-blob-cache\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.704640 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/0a48a90e-a1cb-425f-9260-b40773491d61-buildcachedir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.704709 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/0a48a90e-a1cb-425f-9260-b40773491d61-build-system-configs\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.704717 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/0a48a90e-a1cb-425f-9260-b40773491d61-container-storage-root\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.704785 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"builder-dockercfg-svncm-pull\" (UniqueName: \"kubernetes.io/secret/0a48a90e-a1cb-425f-9260-b40773491d61-builder-dockercfg-svncm-pull\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.704889 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: 
\"kubernetes.io/secret/0a48a90e-a1cb-425f-9260-b40773491d61-service-telemetry-framework-index-dockercfg-user-build-volume\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.704936 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0a48a90e-a1cb-425f-9260-b40773491d61-build-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.704965 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44g6x\" (UniqueName: \"kubernetes.io/projected/0a48a90e-a1cb-425f-9260-b40773491d61-kube-api-access-44g6x\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.705472 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0a48a90e-a1cb-425f-9260-b40773491d61-build-proxy-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.705557 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/0a48a90e-a1cb-425f-9260-b40773491d61-buildcachedir\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.705634 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/0a48a90e-a1cb-425f-9260-b40773491d61-node-pullsecrets\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.706005 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/0a48a90e-a1cb-425f-9260-b40773491d61-container-storage-run\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.706402 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/0a48a90e-a1cb-425f-9260-b40773491d61-build-system-configs\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.712609 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/0a48a90e-a1cb-425f-9260-b40773491d61-build-blob-cache\") pod 
\"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.713157 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0a48a90e-a1cb-425f-9260-b40773491d61-build-ca-bundles\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.717635 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-svncm-pull\" (UniqueName: \"kubernetes.io/secret/0a48a90e-a1cb-425f-9260-b40773491d61-builder-dockercfg-svncm-pull\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.718453 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"builder-dockercfg-svncm-push\" (UniqueName: \"kubernetes.io/secret/0a48a90e-a1cb-425f-9260-b40773491d61-builder-dockercfg-svncm-push\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.727482 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/0a48a90e-a1cb-425f-9260-b40773491d61-service-telemetry-framework-index-dockercfg-user-build-volume\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.735823 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44g6x\" (UniqueName: \"kubernetes.io/projected/0a48a90e-a1cb-425f-9260-b40773491d61-kube-api-access-44g6x\") pod \"service-telemetry-framework-index-1-build\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:05 crc kubenswrapper[4975]: I0126 00:22:05.827465 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:22:08 crc kubenswrapper[4975]: I0126 00:22:08.798723 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-framework-index-1-build"] Jan 26 00:22:08 crc kubenswrapper[4975]: W0126 00:22:08.819875 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0a48a90e_a1cb_425f_9260_b40773491d61.slice/crio-008d7943c4945ebcdcc08f8ca2613059b28bac7f227c0049d74a83a76d83df87 WatchSource:0}: Error finding container 008d7943c4945ebcdcc08f8ca2613059b28bac7f227c0049d74a83a76d83df87: Status 404 returned error can't find the container with id 008d7943c4945ebcdcc08f8ca2613059b28bac7f227c0049d74a83a76d83df87 Jan 26 00:22:09 crc kubenswrapper[4975]: I0126 00:22:09.036019 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"0a48a90e-a1cb-425f-9260-b40773491d61","Type":"ContainerStarted","Data":"008d7943c4945ebcdcc08f8ca2613059b28bac7f227c0049d74a83a76d83df87"} Jan 26 00:22:09 crc kubenswrapper[4975]: I0126 00:22:09.037574 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-4lbtg" event={"ID":"a9fa8b62-74b2-4068-b94e-1968a498a379","Type":"ContainerStarted","Data":"4a22e3d7140450357332ffffce6b63c509939e30129edd6fb2800c7835984d72"} Jan 26 00:22:09 crc kubenswrapper[4975]: I0126 00:22:09.037715 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-4lbtg" Jan 26 00:22:09 crc kubenswrapper[4975]: I0126 00:22:09.039471 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-x2vvk" event={"ID":"44e30ae9-4d62-4cdd-92c9-5e20b05bec04","Type":"ContainerStarted","Data":"3295ab9f575300f84acb7f2e475fe6fcbfeb040667ce8ba8bb307ce14d29d470"} Jan 26 00:22:09 crc kubenswrapper[4975]: I0126 00:22:09.041520 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n" event={"ID":"f403cd64-91dd-45f8-b0b0-981505389e7a","Type":"ContainerStarted","Data":"6c5e0c1383cc0479c298612e1b652f50e26330313b9768a4afdf5567d94b62aa"} Jan 26 00:22:09 crc kubenswrapper[4975]: I0126 00:22:09.043436 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"9ca3c33e-c168-4aec-b194-821f5b3f3995","Type":"ContainerStarted","Data":"d9831cb634e5a7c814808987686f072204f1e74346130ab23c96381576609a46"} Jan 26 00:22:09 crc kubenswrapper[4975]: I0126 00:22:09.057362 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-4lbtg" podStartSLOduration=1.7949345399999999 podStartE2EDuration="16.057349838s" podCreationTimestamp="2026-01-26 00:21:53 +0000 UTC" firstStartedPulling="2026-01-26 00:21:54.395167105 +0000 UTC m=+898.516372599" lastFinishedPulling="2026-01-26 00:22:08.657582403 +0000 UTC m=+912.778787897" observedRunningTime="2026-01-26 00:22:09.053077238 +0000 UTC m=+913.174282732" watchObservedRunningTime="2026-01-26 00:22:09.057349838 +0000 UTC m=+913.178555332" Jan 26 00:22:09 crc kubenswrapper[4975]: I0126 00:22:09.076974 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-x2vvk" podStartSLOduration=2.083555454 
podStartE2EDuration="14.076952548s" podCreationTimestamp="2026-01-26 00:21:55 +0000 UTC" firstStartedPulling="2026-01-26 00:21:56.72690696 +0000 UTC m=+900.848112454" lastFinishedPulling="2026-01-26 00:22:08.720304054 +0000 UTC m=+912.841509548" observedRunningTime="2026-01-26 00:22:09.070906798 +0000 UTC m=+913.192112292" watchObservedRunningTime="2026-01-26 00:22:09.076952548 +0000 UTC m=+913.198158042" Jan 26 00:22:09 crc kubenswrapper[4975]: I0126 00:22:09.151311 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n" podStartSLOduration=-9223371963.70348 podStartE2EDuration="1m13.151296205s" podCreationTimestamp="2026-01-26 00:20:56 +0000 UTC" firstStartedPulling="2026-01-26 00:20:58.081964587 +0000 UTC m=+842.203170091" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:22:09.14756131 +0000 UTC m=+913.268766814" watchObservedRunningTime="2026-01-26 00:22:09.151296205 +0000 UTC m=+913.272501699" Jan 26 00:22:10 crc kubenswrapper[4975]: I0126 00:22:10.481338 4975 patch_prober.go:28] interesting pod/machine-config-daemon-f42fk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 00:22:10 crc kubenswrapper[4975]: I0126 00:22:10.481441 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 00:22:11 crc kubenswrapper[4975]: I0126 00:22:11.059948 4975 generic.go:334] "Generic (PLEG): container finished" podID="9ca3c33e-c168-4aec-b194-821f5b3f3995" containerID="d9831cb634e5a7c814808987686f072204f1e74346130ab23c96381576609a46" exitCode=0 Jan 26 00:22:11 crc kubenswrapper[4975]: I0126 00:22:11.060389 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"9ca3c33e-c168-4aec-b194-821f5b3f3995","Type":"ContainerDied","Data":"d9831cb634e5a7c814808987686f072204f1e74346130ab23c96381576609a46"} Jan 26 00:22:12 crc kubenswrapper[4975]: I0126 00:22:12.068981 4975 generic.go:334] "Generic (PLEG): container finished" podID="9ca3c33e-c168-4aec-b194-821f5b3f3995" containerID="32f8dfec1dd7329eecab9687da3d1bd283a67fb6063bbb57dd0d0b0b5abcd055" exitCode=0 Jan 26 00:22:12 crc kubenswrapper[4975]: I0126 00:22:12.069041 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"9ca3c33e-c168-4aec-b194-821f5b3f3995","Type":"ContainerDied","Data":"32f8dfec1dd7329eecab9687da3d1bd283a67fb6063bbb57dd0d0b0b5abcd055"} Jan 26 00:22:12 crc kubenswrapper[4975]: I0126 00:22:12.466332 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-r2hdr"] Jan 26 00:22:12 crc kubenswrapper[4975]: I0126 00:22:12.468461 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-r2hdr" Jan 26 00:22:12 crc kubenswrapper[4975]: I0126 00:22:12.475966 4975 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-szdkq" Jan 26 00:22:12 crc kubenswrapper[4975]: I0126 00:22:12.479012 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-r2hdr"] Jan 26 00:22:12 crc kubenswrapper[4975]: I0126 00:22:12.499016 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ch65\" (UniqueName: \"kubernetes.io/projected/1e64f1df-24b4-4587-8d7e-9c79667d3575-kube-api-access-8ch65\") pod \"cert-manager-86cb77c54b-r2hdr\" (UID: \"1e64f1df-24b4-4587-8d7e-9c79667d3575\") " pod="cert-manager/cert-manager-86cb77c54b-r2hdr" Jan 26 00:22:12 crc kubenswrapper[4975]: I0126 00:22:12.499231 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1e64f1df-24b4-4587-8d7e-9c79667d3575-bound-sa-token\") pod \"cert-manager-86cb77c54b-r2hdr\" (UID: \"1e64f1df-24b4-4587-8d7e-9c79667d3575\") " pod="cert-manager/cert-manager-86cb77c54b-r2hdr" Jan 26 00:22:12 crc kubenswrapper[4975]: I0126 00:22:12.600572 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1e64f1df-24b4-4587-8d7e-9c79667d3575-bound-sa-token\") pod \"cert-manager-86cb77c54b-r2hdr\" (UID: \"1e64f1df-24b4-4587-8d7e-9c79667d3575\") " pod="cert-manager/cert-manager-86cb77c54b-r2hdr" Jan 26 00:22:12 crc kubenswrapper[4975]: I0126 00:22:12.600653 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ch65\" (UniqueName: \"kubernetes.io/projected/1e64f1df-24b4-4587-8d7e-9c79667d3575-kube-api-access-8ch65\") pod \"cert-manager-86cb77c54b-r2hdr\" (UID: \"1e64f1df-24b4-4587-8d7e-9c79667d3575\") " pod="cert-manager/cert-manager-86cb77c54b-r2hdr" Jan 26 00:22:12 crc kubenswrapper[4975]: I0126 00:22:12.627015 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ch65\" (UniqueName: \"kubernetes.io/projected/1e64f1df-24b4-4587-8d7e-9c79667d3575-kube-api-access-8ch65\") pod \"cert-manager-86cb77c54b-r2hdr\" (UID: \"1e64f1df-24b4-4587-8d7e-9c79667d3575\") " pod="cert-manager/cert-manager-86cb77c54b-r2hdr" Jan 26 00:22:12 crc kubenswrapper[4975]: I0126 00:22:12.627032 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1e64f1df-24b4-4587-8d7e-9c79667d3575-bound-sa-token\") pod \"cert-manager-86cb77c54b-r2hdr\" (UID: \"1e64f1df-24b4-4587-8d7e-9c79667d3575\") " pod="cert-manager/cert-manager-86cb77c54b-r2hdr" Jan 26 00:22:12 crc kubenswrapper[4975]: I0126 00:22:12.793883 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-r2hdr" Jan 26 00:22:13 crc kubenswrapper[4975]: I0126 00:22:13.994977 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-4lbtg" Jan 26 00:22:19 crc kubenswrapper[4975]: I0126 00:22:19.776093 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-r2hdr"] Jan 26 00:22:19 crc kubenswrapper[4975]: W0126 00:22:19.785090 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1e64f1df_24b4_4587_8d7e_9c79667d3575.slice/crio-96e96bc12c2c9fa46f3053f4e7f95c935876ca8cde597c9bd477b0202f6a683f WatchSource:0}: Error finding container 96e96bc12c2c9fa46f3053f4e7f95c935876ca8cde597c9bd477b0202f6a683f: Status 404 returned error can't find the container with id 96e96bc12c2c9fa46f3053f4e7f95c935876ca8cde597c9bd477b0202f6a683f Jan 26 00:22:20 crc kubenswrapper[4975]: I0126 00:22:20.294562 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"0a48a90e-a1cb-425f-9260-b40773491d61","Type":"ContainerStarted","Data":"50d686112484d913ddda357a3f030fc357bcbe8054eb442cc841c76afaf9b4a3"} Jan 26 00:22:20 crc kubenswrapper[4975]: I0126 00:22:20.296235 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-r2hdr" event={"ID":"1e64f1df-24b4-4587-8d7e-9c79667d3575","Type":"ContainerStarted","Data":"7ee5f63a44bce0b005d7345bd138074a846eb172e5d985a34abe393d96a5f24b"} Jan 26 00:22:20 crc kubenswrapper[4975]: I0126 00:22:20.296281 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-r2hdr" event={"ID":"1e64f1df-24b4-4587-8d7e-9c79667d3575","Type":"ContainerStarted","Data":"96e96bc12c2c9fa46f3053f4e7f95c935876ca8cde597c9bd477b0202f6a683f"} Jan 26 00:22:20 crc kubenswrapper[4975]: I0126 00:22:20.298910 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"9ca3c33e-c168-4aec-b194-821f5b3f3995","Type":"ContainerStarted","Data":"b275fb9c86fa793ff3eae0f48a65c1e84701e9ddebba241022512782134aeec3"} Jan 26 00:22:20 crc kubenswrapper[4975]: I0126 00:22:20.299159 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:22:20 crc kubenswrapper[4975]: I0126 00:22:20.366710 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/elasticsearch-es-default-0" podStartSLOduration=12.793574449 podStartE2EDuration="1m0.366691924s" podCreationTimestamp="2026-01-26 00:21:20 +0000 UTC" firstStartedPulling="2026-01-26 00:21:21.086441083 +0000 UTC m=+865.207646577" lastFinishedPulling="2026-01-26 00:22:08.659558558 +0000 UTC m=+912.780764052" observedRunningTime="2026-01-26 00:22:20.365744568 +0000 UTC m=+924.486950062" watchObservedRunningTime="2026-01-26 00:22:20.366691924 +0000 UTC m=+924.487897418" Jan 26 00:22:20 crc kubenswrapper[4975]: I0126 00:22:20.384694 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-r2hdr" podStartSLOduration=8.384676429 podStartE2EDuration="8.384676429s" podCreationTimestamp="2026-01-26 00:22:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:22:20.382610601 +0000 UTC 
m=+924.503816095" watchObservedRunningTime="2026-01-26 00:22:20.384676429 +0000 UTC m=+924.505881933" Jan 26 00:22:21 crc kubenswrapper[4975]: I0126 00:22:21.304917 4975 generic.go:334] "Generic (PLEG): container finished" podID="0a48a90e-a1cb-425f-9260-b40773491d61" containerID="50d686112484d913ddda357a3f030fc357bcbe8054eb442cc841c76afaf9b4a3" exitCode=0 Jan 26 00:22:21 crc kubenswrapper[4975]: I0126 00:22:21.304961 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"0a48a90e-a1cb-425f-9260-b40773491d61","Type":"ContainerDied","Data":"50d686112484d913ddda357a3f030fc357bcbe8054eb442cc841c76afaf9b4a3"} Jan 26 00:22:23 crc kubenswrapper[4975]: I0126 00:22:23.319130 4975 generic.go:334] "Generic (PLEG): container finished" podID="0a48a90e-a1cb-425f-9260-b40773491d61" containerID="0916e8a60f395a3f2373335d96e861b4d23432840e9d9b7b7106e1dc8ad69b93" exitCode=0 Jan 26 00:22:23 crc kubenswrapper[4975]: I0126 00:22:23.319479 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"0a48a90e-a1cb-425f-9260-b40773491d61","Type":"ContainerDied","Data":"0916e8a60f395a3f2373335d96e861b4d23432840e9d9b7b7106e1dc8ad69b93"} Jan 26 00:22:23 crc kubenswrapper[4975]: I0126 00:22:23.612926 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-framework-index-1-build_0a48a90e-a1cb-425f-9260-b40773491d61/manage-dockerfile/0.log" Jan 26 00:22:24 crc kubenswrapper[4975]: I0126 00:22:24.333669 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"0a48a90e-a1cb-425f-9260-b40773491d61","Type":"ContainerStarted","Data":"089e349f04adb65ada7f6549ad873642642e62e7699cc039031bcbe66c8029b3"} Jan 26 00:22:24 crc kubenswrapper[4975]: I0126 00:22:24.383542 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-framework-index-1-build" podStartSLOduration=8.801366261 podStartE2EDuration="19.383518199s" podCreationTimestamp="2026-01-26 00:22:05 +0000 UTC" firstStartedPulling="2026-01-26 00:22:08.82237332 +0000 UTC m=+912.943578814" lastFinishedPulling="2026-01-26 00:22:19.404525258 +0000 UTC m=+923.525730752" observedRunningTime="2026-01-26 00:22:24.382308765 +0000 UTC m=+928.503514259" watchObservedRunningTime="2026-01-26 00:22:24.383518199 +0000 UTC m=+928.504723693" Jan 26 00:22:30 crc kubenswrapper[4975]: I0126 00:22:30.801947 4975 prober.go:107] "Probe failed" probeType="Readiness" pod="service-telemetry/elasticsearch-es-default-0" podUID="9ca3c33e-c168-4aec-b194-821f5b3f3995" containerName="elasticsearch" probeResult="failure" output=< Jan 26 00:22:30 crc kubenswrapper[4975]: {"timestamp": "2026-01-26T00:22:30+00:00", "message": "readiness probe failed", "curl_rc": "7"} Jan 26 00:22:30 crc kubenswrapper[4975]: > Jan 26 00:22:35 crc kubenswrapper[4975]: I0126 00:22:35.825649 4975 prober.go:107] "Probe failed" probeType="Readiness" pod="service-telemetry/elasticsearch-es-default-0" podUID="9ca3c33e-c168-4aec-b194-821f5b3f3995" containerName="elasticsearch" probeResult="failure" output=< Jan 26 00:22:35 crc kubenswrapper[4975]: {"timestamp": "2026-01-26T00:22:35+00:00", "message": "readiness probe failed", "curl_rc": "7"} Jan 26 00:22:35 crc kubenswrapper[4975]: > Jan 26 00:22:40 crc kubenswrapper[4975]: I0126 00:22:40.481255 4975 patch_prober.go:28] interesting 
pod/machine-config-daemon-f42fk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 00:22:40 crc kubenswrapper[4975]: I0126 00:22:40.481906 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 00:22:40 crc kubenswrapper[4975]: I0126 00:22:40.891093 4975 prober.go:107] "Probe failed" probeType="Readiness" pod="service-telemetry/elasticsearch-es-default-0" podUID="9ca3c33e-c168-4aec-b194-821f5b3f3995" containerName="elasticsearch" probeResult="failure" output=< Jan 26 00:22:40 crc kubenswrapper[4975]: {"timestamp": "2026-01-26T00:22:40+00:00", "message": "readiness probe failed", "curl_rc": "7"} Jan 26 00:22:40 crc kubenswrapper[4975]: > Jan 26 00:22:45 crc kubenswrapper[4975]: I0126 00:22:45.678810 4975 prober.go:107] "Probe failed" probeType="Readiness" pod="service-telemetry/elasticsearch-es-default-0" podUID="9ca3c33e-c168-4aec-b194-821f5b3f3995" containerName="elasticsearch" probeResult="failure" output=< Jan 26 00:22:45 crc kubenswrapper[4975]: {"timestamp": "2026-01-26T00:22:45+00:00", "message": "readiness probe failed", "curl_rc": "7"} Jan 26 00:22:45 crc kubenswrapper[4975]: > Jan 26 00:22:51 crc kubenswrapper[4975]: I0126 00:22:51.548874 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/elasticsearch-es-default-0" Jan 26 00:23:10 crc kubenswrapper[4975]: I0126 00:23:10.482033 4975 patch_prober.go:28] interesting pod/machine-config-daemon-f42fk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 00:23:10 crc kubenswrapper[4975]: I0126 00:23:10.482988 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 00:23:10 crc kubenswrapper[4975]: I0126 00:23:10.483067 4975 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" Jan 26 00:23:10 crc kubenswrapper[4975]: I0126 00:23:10.484031 4975 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1ff2c84d111b05e92d29b652399f7326ae273accdab83024597003719d8b7515"} pod="openshift-machine-config-operator/machine-config-daemon-f42fk" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 26 00:23:10 crc kubenswrapper[4975]: I0126 00:23:10.484111 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" containerID="cri-o://1ff2c84d111b05e92d29b652399f7326ae273accdab83024597003719d8b7515" gracePeriod=600 Jan 26 00:23:11 crc 
kubenswrapper[4975]: I0126 00:23:11.045749 4975 generic.go:334] "Generic (PLEG): container finished" podID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerID="1ff2c84d111b05e92d29b652399f7326ae273accdab83024597003719d8b7515" exitCode=0 Jan 26 00:23:11 crc kubenswrapper[4975]: I0126 00:23:11.046612 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" event={"ID":"b76c31fb-14ea-4b49-8a41-0b2731967b86","Type":"ContainerDied","Data":"1ff2c84d111b05e92d29b652399f7326ae273accdab83024597003719d8b7515"} Jan 26 00:23:11 crc kubenswrapper[4975]: I0126 00:23:11.046916 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" event={"ID":"b76c31fb-14ea-4b49-8a41-0b2731967b86","Type":"ContainerStarted","Data":"2ec9837c4f02f5e5de7675625d8c048c082c725779a8095fa603812c7a7ca1e8"} Jan 26 00:23:11 crc kubenswrapper[4975]: I0126 00:23:11.046975 4975 scope.go:117] "RemoveContainer" containerID="54e8ca5fca0ddd7e187eb19ea2c58ccba0fed975c1e1fa56247a980eb8312698" Jan 26 00:23:16 crc kubenswrapper[4975]: I0126 00:23:16.080051 4975 generic.go:334] "Generic (PLEG): container finished" podID="0a48a90e-a1cb-425f-9260-b40773491d61" containerID="089e349f04adb65ada7f6549ad873642642e62e7699cc039031bcbe66c8029b3" exitCode=0 Jan 26 00:23:16 crc kubenswrapper[4975]: I0126 00:23:16.080142 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"0a48a90e-a1cb-425f-9260-b40773491d61","Type":"ContainerDied","Data":"089e349f04adb65ada7f6549ad873642642e62e7699cc039031bcbe66c8029b3"} Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.342980 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.384546 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/0a48a90e-a1cb-425f-9260-b40773491d61-node-pullsecrets\") pod \"0a48a90e-a1cb-425f-9260-b40773491d61\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.384612 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/0a48a90e-a1cb-425f-9260-b40773491d61-service-telemetry-framework-index-dockercfg-user-build-volume\") pod \"0a48a90e-a1cb-425f-9260-b40773491d61\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.384664 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44g6x\" (UniqueName: \"kubernetes.io/projected/0a48a90e-a1cb-425f-9260-b40773491d61-kube-api-access-44g6x\") pod \"0a48a90e-a1cb-425f-9260-b40773491d61\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.384713 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/0a48a90e-a1cb-425f-9260-b40773491d61-buildcachedir\") pod \"0a48a90e-a1cb-425f-9260-b40773491d61\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.384768 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-svncm-pull\" (UniqueName: \"kubernetes.io/secret/0a48a90e-a1cb-425f-9260-b40773491d61-builder-dockercfg-svncm-pull\") pod \"0a48a90e-a1cb-425f-9260-b40773491d61\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.384833 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/0a48a90e-a1cb-425f-9260-b40773491d61-build-blob-cache\") pod \"0a48a90e-a1cb-425f-9260-b40773491d61\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.384883 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/0a48a90e-a1cb-425f-9260-b40773491d61-container-storage-run\") pod \"0a48a90e-a1cb-425f-9260-b40773491d61\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.384919 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/0a48a90e-a1cb-425f-9260-b40773491d61-container-storage-root\") pod \"0a48a90e-a1cb-425f-9260-b40773491d61\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.384964 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/0a48a90e-a1cb-425f-9260-b40773491d61-buildworkdir\") pod \"0a48a90e-a1cb-425f-9260-b40773491d61\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.385014 4975 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/0a48a90e-a1cb-425f-9260-b40773491d61-build-system-configs\") pod \"0a48a90e-a1cb-425f-9260-b40773491d61\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.385016 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0a48a90e-a1cb-425f-9260-b40773491d61-buildcachedir" (OuterVolumeSpecName: "buildcachedir") pod "0a48a90e-a1cb-425f-9260-b40773491d61" (UID: "0a48a90e-a1cb-425f-9260-b40773491d61"). InnerVolumeSpecName "buildcachedir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.385036 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0a48a90e-a1cb-425f-9260-b40773491d61-build-ca-bundles\") pod \"0a48a90e-a1cb-425f-9260-b40773491d61\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.385080 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0a48a90e-a1cb-425f-9260-b40773491d61-build-proxy-ca-bundles\") pod \"0a48a90e-a1cb-425f-9260-b40773491d61\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.385132 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"builder-dockercfg-svncm-push\" (UniqueName: \"kubernetes.io/secret/0a48a90e-a1cb-425f-9260-b40773491d61-builder-dockercfg-svncm-push\") pod \"0a48a90e-a1cb-425f-9260-b40773491d61\" (UID: \"0a48a90e-a1cb-425f-9260-b40773491d61\") " Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.385450 4975 reconciler_common.go:293] "Volume detached for volume \"buildcachedir\" (UniqueName: \"kubernetes.io/host-path/0a48a90e-a1cb-425f-9260-b40773491d61-buildcachedir\") on node \"crc\" DevicePath \"\"" Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.385079 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0a48a90e-a1cb-425f-9260-b40773491d61-node-pullsecrets" (OuterVolumeSpecName: "node-pullsecrets") pod "0a48a90e-a1cb-425f-9260-b40773491d61" (UID: "0a48a90e-a1cb-425f-9260-b40773491d61"). InnerVolumeSpecName "node-pullsecrets". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.386879 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a48a90e-a1cb-425f-9260-b40773491d61-build-system-configs" (OuterVolumeSpecName: "build-system-configs") pod "0a48a90e-a1cb-425f-9260-b40773491d61" (UID: "0a48a90e-a1cb-425f-9260-b40773491d61"). InnerVolumeSpecName "build-system-configs". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.387284 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a48a90e-a1cb-425f-9260-b40773491d61-buildworkdir" (OuterVolumeSpecName: "buildworkdir") pod "0a48a90e-a1cb-425f-9260-b40773491d61" (UID: "0a48a90e-a1cb-425f-9260-b40773491d61"). InnerVolumeSpecName "buildworkdir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.387826 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a48a90e-a1cb-425f-9260-b40773491d61-container-storage-run" (OuterVolumeSpecName: "container-storage-run") pod "0a48a90e-a1cb-425f-9260-b40773491d61" (UID: "0a48a90e-a1cb-425f-9260-b40773491d61"). InnerVolumeSpecName "container-storage-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.388144 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a48a90e-a1cb-425f-9260-b40773491d61-build-ca-bundles" (OuterVolumeSpecName: "build-ca-bundles") pod "0a48a90e-a1cb-425f-9260-b40773491d61" (UID: "0a48a90e-a1cb-425f-9260-b40773491d61"). InnerVolumeSpecName "build-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.389182 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a48a90e-a1cb-425f-9260-b40773491d61-build-proxy-ca-bundles" (OuterVolumeSpecName: "build-proxy-ca-bundles") pod "0a48a90e-a1cb-425f-9260-b40773491d61" (UID: "0a48a90e-a1cb-425f-9260-b40773491d61"). InnerVolumeSpecName "build-proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.390396 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a48a90e-a1cb-425f-9260-b40773491d61-builder-dockercfg-svncm-pull" (OuterVolumeSpecName: "builder-dockercfg-svncm-pull") pod "0a48a90e-a1cb-425f-9260-b40773491d61" (UID: "0a48a90e-a1cb-425f-9260-b40773491d61"). InnerVolumeSpecName "builder-dockercfg-svncm-pull". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.390509 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a48a90e-a1cb-425f-9260-b40773491d61-kube-api-access-44g6x" (OuterVolumeSpecName: "kube-api-access-44g6x") pod "0a48a90e-a1cb-425f-9260-b40773491d61" (UID: "0a48a90e-a1cb-425f-9260-b40773491d61"). InnerVolumeSpecName "kube-api-access-44g6x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.391322 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a48a90e-a1cb-425f-9260-b40773491d61-builder-dockercfg-svncm-push" (OuterVolumeSpecName: "builder-dockercfg-svncm-push") pod "0a48a90e-a1cb-425f-9260-b40773491d61" (UID: "0a48a90e-a1cb-425f-9260-b40773491d61"). InnerVolumeSpecName "builder-dockercfg-svncm-push". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.391575 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0a48a90e-a1cb-425f-9260-b40773491d61-service-telemetry-framework-index-dockercfg-user-build-volume" (OuterVolumeSpecName: "service-telemetry-framework-index-dockercfg-user-build-volume") pod "0a48a90e-a1cb-425f-9260-b40773491d61" (UID: "0a48a90e-a1cb-425f-9260-b40773491d61"). InnerVolumeSpecName "service-telemetry-framework-index-dockercfg-user-build-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.486972 4975 reconciler_common.go:293] "Volume detached for volume \"buildworkdir\" (UniqueName: \"kubernetes.io/empty-dir/0a48a90e-a1cb-425f-9260-b40773491d61-buildworkdir\") on node \"crc\" DevicePath \"\"" Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.487312 4975 reconciler_common.go:293] "Volume detached for volume \"build-system-configs\" (UniqueName: \"kubernetes.io/configmap/0a48a90e-a1cb-425f-9260-b40773491d61-build-system-configs\") on node \"crc\" DevicePath \"\"" Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.487328 4975 reconciler_common.go:293] "Volume detached for volume \"build-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0a48a90e-a1cb-425f-9260-b40773491d61-build-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.487339 4975 reconciler_common.go:293] "Volume detached for volume \"build-proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0a48a90e-a1cb-425f-9260-b40773491d61-build-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.487351 4975 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-svncm-push\" (UniqueName: \"kubernetes.io/secret/0a48a90e-a1cb-425f-9260-b40773491d61-builder-dockercfg-svncm-push\") on node \"crc\" DevicePath \"\"" Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.487365 4975 reconciler_common.go:293] "Volume detached for volume \"service-telemetry-framework-index-dockercfg-user-build-volume\" (UniqueName: \"kubernetes.io/secret/0a48a90e-a1cb-425f-9260-b40773491d61-service-telemetry-framework-index-dockercfg-user-build-volume\") on node \"crc\" DevicePath \"\"" Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.487378 4975 reconciler_common.go:293] "Volume detached for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/0a48a90e-a1cb-425f-9260-b40773491d61-node-pullsecrets\") on node \"crc\" DevicePath \"\"" Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.487390 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44g6x\" (UniqueName: \"kubernetes.io/projected/0a48a90e-a1cb-425f-9260-b40773491d61-kube-api-access-44g6x\") on node \"crc\" DevicePath \"\"" Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.487401 4975 reconciler_common.go:293] "Volume detached for volume \"builder-dockercfg-svncm-pull\" (UniqueName: \"kubernetes.io/secret/0a48a90e-a1cb-425f-9260-b40773491d61-builder-dockercfg-svncm-pull\") on node \"crc\" DevicePath \"\"" Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.487412 4975 reconciler_common.go:293] "Volume detached for volume \"container-storage-run\" (UniqueName: \"kubernetes.io/empty-dir/0a48a90e-a1cb-425f-9260-b40773491d61-container-storage-run\") on node \"crc\" DevicePath \"\"" Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.593608 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a48a90e-a1cb-425f-9260-b40773491d61-build-blob-cache" (OuterVolumeSpecName: "build-blob-cache") pod "0a48a90e-a1cb-425f-9260-b40773491d61" (UID: "0a48a90e-a1cb-425f-9260-b40773491d61"). InnerVolumeSpecName "build-blob-cache". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:23:17 crc kubenswrapper[4975]: I0126 00:23:17.690128 4975 reconciler_common.go:293] "Volume detached for volume \"build-blob-cache\" (UniqueName: \"kubernetes.io/empty-dir/0a48a90e-a1cb-425f-9260-b40773491d61-build-blob-cache\") on node \"crc\" DevicePath \"\"" Jan 26 00:23:18 crc kubenswrapper[4975]: I0126 00:23:18.096879 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-framework-index-1-build" event={"ID":"0a48a90e-a1cb-425f-9260-b40773491d61","Type":"ContainerDied","Data":"008d7943c4945ebcdcc08f8ca2613059b28bac7f227c0049d74a83a76d83df87"} Jan 26 00:23:18 crc kubenswrapper[4975]: I0126 00:23:18.096929 4975 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="008d7943c4945ebcdcc08f8ca2613059b28bac7f227c0049d74a83a76d83df87" Jan 26 00:23:18 crc kubenswrapper[4975]: I0126 00:23:18.097006 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-framework-index-1-build" Jan 26 00:23:18 crc kubenswrapper[4975]: I0126 00:23:18.431227 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a48a90e-a1cb-425f-9260-b40773491d61-container-storage-root" (OuterVolumeSpecName: "container-storage-root") pod "0a48a90e-a1cb-425f-9260-b40773491d61" (UID: "0a48a90e-a1cb-425f-9260-b40773491d61"). InnerVolumeSpecName "container-storage-root". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:23:18 crc kubenswrapper[4975]: I0126 00:23:18.506602 4975 reconciler_common.go:293] "Volume detached for volume \"container-storage-root\" (UniqueName: \"kubernetes.io/empty-dir/0a48a90e-a1cb-425f-9260-b40773491d61-container-storage-root\") on node \"crc\" DevicePath \"\"" Jan 26 00:23:18 crc kubenswrapper[4975]: I0126 00:23:18.666617 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/infrawatch-operators-92tz6"] Jan 26 00:23:18 crc kubenswrapper[4975]: E0126 00:23:18.666998 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a48a90e-a1cb-425f-9260-b40773491d61" containerName="docker-build" Jan 26 00:23:18 crc kubenswrapper[4975]: I0126 00:23:18.667022 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a48a90e-a1cb-425f-9260-b40773491d61" containerName="docker-build" Jan 26 00:23:18 crc kubenswrapper[4975]: E0126 00:23:18.667031 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a48a90e-a1cb-425f-9260-b40773491d61" containerName="manage-dockerfile" Jan 26 00:23:18 crc kubenswrapper[4975]: I0126 00:23:18.667040 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a48a90e-a1cb-425f-9260-b40773491d61" containerName="manage-dockerfile" Jan 26 00:23:18 crc kubenswrapper[4975]: E0126 00:23:18.667061 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a48a90e-a1cb-425f-9260-b40773491d61" containerName="git-clone" Jan 26 00:23:18 crc kubenswrapper[4975]: I0126 00:23:18.667069 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a48a90e-a1cb-425f-9260-b40773491d61" containerName="git-clone" Jan 26 00:23:18 crc kubenswrapper[4975]: I0126 00:23:18.667172 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a48a90e-a1cb-425f-9260-b40773491d61" containerName="docker-build" Jan 26 00:23:18 crc kubenswrapper[4975]: I0126 00:23:18.667679 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-92tz6" Jan 26 00:23:18 crc kubenswrapper[4975]: I0126 00:23:18.670626 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"infrawatch-operators-dockercfg-m7sr7" Jan 26 00:23:18 crc kubenswrapper[4975]: I0126 00:23:18.679057 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-92tz6"] Jan 26 00:23:18 crc kubenswrapper[4975]: I0126 00:23:18.709427 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drjcn\" (UniqueName: \"kubernetes.io/projected/1b417e79-7419-4613-ae68-45debb8bd6ae-kube-api-access-drjcn\") pod \"infrawatch-operators-92tz6\" (UID: \"1b417e79-7419-4613-ae68-45debb8bd6ae\") " pod="service-telemetry/infrawatch-operators-92tz6" Jan 26 00:23:18 crc kubenswrapper[4975]: I0126 00:23:18.810589 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drjcn\" (UniqueName: \"kubernetes.io/projected/1b417e79-7419-4613-ae68-45debb8bd6ae-kube-api-access-drjcn\") pod \"infrawatch-operators-92tz6\" (UID: \"1b417e79-7419-4613-ae68-45debb8bd6ae\") " pod="service-telemetry/infrawatch-operators-92tz6" Jan 26 00:23:19 crc kubenswrapper[4975]: I0126 00:23:18.829005 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drjcn\" (UniqueName: \"kubernetes.io/projected/1b417e79-7419-4613-ae68-45debb8bd6ae-kube-api-access-drjcn\") pod \"infrawatch-operators-92tz6\" (UID: \"1b417e79-7419-4613-ae68-45debb8bd6ae\") " pod="service-telemetry/infrawatch-operators-92tz6" Jan 26 00:23:19 crc kubenswrapper[4975]: I0126 00:23:18.987713 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-92tz6" Jan 26 00:23:19 crc kubenswrapper[4975]: I0126 00:23:19.432299 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-92tz6"] Jan 26 00:23:20 crc kubenswrapper[4975]: I0126 00:23:20.111191 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-92tz6" event={"ID":"1b417e79-7419-4613-ae68-45debb8bd6ae","Type":"ContainerStarted","Data":"45ddc418353b11a1877169d8b1de2b276408cef91638e4b3cf202e84b3ed9d7a"} Jan 26 00:23:23 crc kubenswrapper[4975]: I0126 00:23:23.043455 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-92tz6"] Jan 26 00:23:23 crc kubenswrapper[4975]: I0126 00:23:23.855279 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/infrawatch-operators-75cbk"] Jan 26 00:23:23 crc kubenswrapper[4975]: I0126 00:23:23.856450 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-75cbk" Jan 26 00:23:23 crc kubenswrapper[4975]: I0126 00:23:23.861883 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-75cbk"] Jan 26 00:23:23 crc kubenswrapper[4975]: I0126 00:23:23.891208 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjz2f\" (UniqueName: \"kubernetes.io/projected/883c2793-9a31-47f5-a8d8-8f91597a8803-kube-api-access-fjz2f\") pod \"infrawatch-operators-75cbk\" (UID: \"883c2793-9a31-47f5-a8d8-8f91597a8803\") " pod="service-telemetry/infrawatch-operators-75cbk" Jan 26 00:23:23 crc kubenswrapper[4975]: I0126 00:23:23.992814 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjz2f\" (UniqueName: \"kubernetes.io/projected/883c2793-9a31-47f5-a8d8-8f91597a8803-kube-api-access-fjz2f\") pod \"infrawatch-operators-75cbk\" (UID: \"883c2793-9a31-47f5-a8d8-8f91597a8803\") " pod="service-telemetry/infrawatch-operators-75cbk" Jan 26 00:23:24 crc kubenswrapper[4975]: I0126 00:23:24.015989 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjz2f\" (UniqueName: \"kubernetes.io/projected/883c2793-9a31-47f5-a8d8-8f91597a8803-kube-api-access-fjz2f\") pod \"infrawatch-operators-75cbk\" (UID: \"883c2793-9a31-47f5-a8d8-8f91597a8803\") " pod="service-telemetry/infrawatch-operators-75cbk" Jan 26 00:23:24 crc kubenswrapper[4975]: I0126 00:23:24.183644 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-75cbk" Jan 26 00:23:25 crc kubenswrapper[4975]: I0126 00:23:25.330449 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-75cbk"] Jan 26 00:23:27 crc kubenswrapper[4975]: W0126 00:23:27.624956 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod883c2793_9a31_47f5_a8d8_8f91597a8803.slice/crio-bda337627199b29d5216d6353789bfe7b80a43ae6fff9926b274915a4f3ac584 WatchSource:0}: Error finding container bda337627199b29d5216d6353789bfe7b80a43ae6fff9926b274915a4f3ac584: Status 404 returned error can't find the container with id bda337627199b29d5216d6353789bfe7b80a43ae6fff9926b274915a4f3ac584 Jan 26 00:23:28 crc kubenswrapper[4975]: I0126 00:23:28.190222 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-75cbk" event={"ID":"883c2793-9a31-47f5-a8d8-8f91597a8803","Type":"ContainerStarted","Data":"bda337627199b29d5216d6353789bfe7b80a43ae6fff9926b274915a4f3ac584"} Jan 26 00:23:32 crc kubenswrapper[4975]: I0126 00:23:32.217769 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-75cbk" event={"ID":"883c2793-9a31-47f5-a8d8-8f91597a8803","Type":"ContainerStarted","Data":"4ce435ae4241014bb32e6e3caea465bd14d9fca888fa90d6febf80285f1c8a5f"} Jan 26 00:23:32 crc kubenswrapper[4975]: I0126 00:23:32.220648 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-92tz6" event={"ID":"1b417e79-7419-4613-ae68-45debb8bd6ae","Type":"ContainerStarted","Data":"49745c235c9e7882e472d9fa0d611f76235d20278c3b35518f8ce9292ac47432"} Jan 26 00:23:32 crc kubenswrapper[4975]: I0126 00:23:32.220834 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/infrawatch-operators-92tz6" 
podUID="1b417e79-7419-4613-ae68-45debb8bd6ae" containerName="registry-server" containerID="cri-o://49745c235c9e7882e472d9fa0d611f76235d20278c3b35518f8ce9292ac47432" gracePeriod=2 Jan 26 00:23:32 crc kubenswrapper[4975]: I0126 00:23:32.236101 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/infrawatch-operators-75cbk" podStartSLOduration=4.834400771 podStartE2EDuration="9.236073322s" podCreationTimestamp="2026-01-26 00:23:23 +0000 UTC" firstStartedPulling="2026-01-26 00:23:27.627017508 +0000 UTC m=+991.748223002" lastFinishedPulling="2026-01-26 00:23:32.028690049 +0000 UTC m=+996.149895553" observedRunningTime="2026-01-26 00:23:32.233901981 +0000 UTC m=+996.355107475" watchObservedRunningTime="2026-01-26 00:23:32.236073322 +0000 UTC m=+996.357278816" Jan 26 00:23:32 crc kubenswrapper[4975]: I0126 00:23:32.252854 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/infrawatch-operators-92tz6" podStartSLOduration=1.628466827 podStartE2EDuration="14.252833063s" podCreationTimestamp="2026-01-26 00:23:18 +0000 UTC" firstStartedPulling="2026-01-26 00:23:19.442860285 +0000 UTC m=+983.564065779" lastFinishedPulling="2026-01-26 00:23:32.067226521 +0000 UTC m=+996.188432015" observedRunningTime="2026-01-26 00:23:32.251116024 +0000 UTC m=+996.372321518" watchObservedRunningTime="2026-01-26 00:23:32.252833063 +0000 UTC m=+996.374038567" Jan 26 00:23:32 crc kubenswrapper[4975]: I0126 00:23:32.540941 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_infrawatch-operators-92tz6_1b417e79-7419-4613-ae68-45debb8bd6ae/registry-server/0.log" Jan 26 00:23:32 crc kubenswrapper[4975]: I0126 00:23:32.541311 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-92tz6" Jan 26 00:23:32 crc kubenswrapper[4975]: I0126 00:23:32.721621 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-drjcn\" (UniqueName: \"kubernetes.io/projected/1b417e79-7419-4613-ae68-45debb8bd6ae-kube-api-access-drjcn\") pod \"1b417e79-7419-4613-ae68-45debb8bd6ae\" (UID: \"1b417e79-7419-4613-ae68-45debb8bd6ae\") " Jan 26 00:23:32 crc kubenswrapper[4975]: I0126 00:23:32.729804 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b417e79-7419-4613-ae68-45debb8bd6ae-kube-api-access-drjcn" (OuterVolumeSpecName: "kube-api-access-drjcn") pod "1b417e79-7419-4613-ae68-45debb8bd6ae" (UID: "1b417e79-7419-4613-ae68-45debb8bd6ae"). InnerVolumeSpecName "kube-api-access-drjcn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:23:32 crc kubenswrapper[4975]: I0126 00:23:32.823008 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-drjcn\" (UniqueName: \"kubernetes.io/projected/1b417e79-7419-4613-ae68-45debb8bd6ae-kube-api-access-drjcn\") on node \"crc\" DevicePath \"\"" Jan 26 00:23:33 crc kubenswrapper[4975]: I0126 00:23:33.228781 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_infrawatch-operators-92tz6_1b417e79-7419-4613-ae68-45debb8bd6ae/registry-server/0.log" Jan 26 00:23:33 crc kubenswrapper[4975]: I0126 00:23:33.228825 4975 generic.go:334] "Generic (PLEG): container finished" podID="1b417e79-7419-4613-ae68-45debb8bd6ae" containerID="49745c235c9e7882e472d9fa0d611f76235d20278c3b35518f8ce9292ac47432" exitCode=2 Jan 26 00:23:33 crc kubenswrapper[4975]: I0126 00:23:33.228888 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-92tz6" Jan 26 00:23:33 crc kubenswrapper[4975]: I0126 00:23:33.228983 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-92tz6" event={"ID":"1b417e79-7419-4613-ae68-45debb8bd6ae","Type":"ContainerDied","Data":"49745c235c9e7882e472d9fa0d611f76235d20278c3b35518f8ce9292ac47432"} Jan 26 00:23:33 crc kubenswrapper[4975]: I0126 00:23:33.229027 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-92tz6" event={"ID":"1b417e79-7419-4613-ae68-45debb8bd6ae","Type":"ContainerDied","Data":"45ddc418353b11a1877169d8b1de2b276408cef91638e4b3cf202e84b3ed9d7a"} Jan 26 00:23:33 crc kubenswrapper[4975]: I0126 00:23:33.229047 4975 scope.go:117] "RemoveContainer" containerID="49745c235c9e7882e472d9fa0d611f76235d20278c3b35518f8ce9292ac47432" Jan 26 00:23:33 crc kubenswrapper[4975]: I0126 00:23:33.247231 4975 scope.go:117] "RemoveContainer" containerID="49745c235c9e7882e472d9fa0d611f76235d20278c3b35518f8ce9292ac47432" Jan 26 00:23:33 crc kubenswrapper[4975]: E0126 00:23:33.247825 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49745c235c9e7882e472d9fa0d611f76235d20278c3b35518f8ce9292ac47432\": container with ID starting with 49745c235c9e7882e472d9fa0d611f76235d20278c3b35518f8ce9292ac47432 not found: ID does not exist" containerID="49745c235c9e7882e472d9fa0d611f76235d20278c3b35518f8ce9292ac47432" Jan 26 00:23:33 crc kubenswrapper[4975]: I0126 00:23:33.247906 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49745c235c9e7882e472d9fa0d611f76235d20278c3b35518f8ce9292ac47432"} err="failed to get container status \"49745c235c9e7882e472d9fa0d611f76235d20278c3b35518f8ce9292ac47432\": rpc error: code = NotFound desc = could not find container \"49745c235c9e7882e472d9fa0d611f76235d20278c3b35518f8ce9292ac47432\": container with ID starting with 49745c235c9e7882e472d9fa0d611f76235d20278c3b35518f8ce9292ac47432 not found: ID does not exist" Jan 26 00:23:33 crc kubenswrapper[4975]: I0126 00:23:33.264222 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-92tz6"] Jan 26 00:23:33 crc kubenswrapper[4975]: I0126 00:23:33.270267 4975 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/infrawatch-operators-92tz6"] Jan 26 00:23:34 crc kubenswrapper[4975]: I0126 00:23:34.155423 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="1b417e79-7419-4613-ae68-45debb8bd6ae" path="/var/lib/kubelet/pods/1b417e79-7419-4613-ae68-45debb8bd6ae/volumes" Jan 26 00:23:34 crc kubenswrapper[4975]: I0126 00:23:34.184028 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/infrawatch-operators-75cbk" Jan 26 00:23:34 crc kubenswrapper[4975]: I0126 00:23:34.184098 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/infrawatch-operators-75cbk" Jan 26 00:23:34 crc kubenswrapper[4975]: I0126 00:23:34.218361 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/infrawatch-operators-75cbk" Jan 26 00:23:44 crc kubenswrapper[4975]: I0126 00:23:44.219823 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/infrawatch-operators-75cbk" Jan 26 00:23:45 crc kubenswrapper[4975]: I0126 00:23:45.653525 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fjmbb"] Jan 26 00:23:45 crc kubenswrapper[4975]: E0126 00:23:45.653943 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1b417e79-7419-4613-ae68-45debb8bd6ae" containerName="registry-server" Jan 26 00:23:45 crc kubenswrapper[4975]: I0126 00:23:45.653964 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="1b417e79-7419-4613-ae68-45debb8bd6ae" containerName="registry-server" Jan 26 00:23:45 crc kubenswrapper[4975]: I0126 00:23:45.654168 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="1b417e79-7419-4613-ae68-45debb8bd6ae" containerName="registry-server" Jan 26 00:23:45 crc kubenswrapper[4975]: I0126 00:23:45.655892 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fjmbb" Jan 26 00:23:45 crc kubenswrapper[4975]: I0126 00:23:45.662206 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fjmbb"] Jan 26 00:23:45 crc kubenswrapper[4975]: I0126 00:23:45.826960 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddf952bd-6316-4782-9c91-9744c77eb1eb-utilities\") pod \"community-operators-fjmbb\" (UID: \"ddf952bd-6316-4782-9c91-9744c77eb1eb\") " pod="openshift-marketplace/community-operators-fjmbb" Jan 26 00:23:45 crc kubenswrapper[4975]: I0126 00:23:45.827073 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8lmlr\" (UniqueName: \"kubernetes.io/projected/ddf952bd-6316-4782-9c91-9744c77eb1eb-kube-api-access-8lmlr\") pod \"community-operators-fjmbb\" (UID: \"ddf952bd-6316-4782-9c91-9744c77eb1eb\") " pod="openshift-marketplace/community-operators-fjmbb" Jan 26 00:23:45 crc kubenswrapper[4975]: I0126 00:23:45.827153 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddf952bd-6316-4782-9c91-9744c77eb1eb-catalog-content\") pod \"community-operators-fjmbb\" (UID: \"ddf952bd-6316-4782-9c91-9744c77eb1eb\") " pod="openshift-marketplace/community-operators-fjmbb" Jan 26 00:23:45 crc kubenswrapper[4975]: I0126 00:23:45.928333 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8lmlr\" (UniqueName: \"kubernetes.io/projected/ddf952bd-6316-4782-9c91-9744c77eb1eb-kube-api-access-8lmlr\") pod 
\"community-operators-fjmbb\" (UID: \"ddf952bd-6316-4782-9c91-9744c77eb1eb\") " pod="openshift-marketplace/community-operators-fjmbb" Jan 26 00:23:45 crc kubenswrapper[4975]: I0126 00:23:45.928443 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddf952bd-6316-4782-9c91-9744c77eb1eb-catalog-content\") pod \"community-operators-fjmbb\" (UID: \"ddf952bd-6316-4782-9c91-9744c77eb1eb\") " pod="openshift-marketplace/community-operators-fjmbb" Jan 26 00:23:45 crc kubenswrapper[4975]: I0126 00:23:45.928490 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddf952bd-6316-4782-9c91-9744c77eb1eb-utilities\") pod \"community-operators-fjmbb\" (UID: \"ddf952bd-6316-4782-9c91-9744c77eb1eb\") " pod="openshift-marketplace/community-operators-fjmbb" Jan 26 00:23:45 crc kubenswrapper[4975]: I0126 00:23:45.929132 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddf952bd-6316-4782-9c91-9744c77eb1eb-utilities\") pod \"community-operators-fjmbb\" (UID: \"ddf952bd-6316-4782-9c91-9744c77eb1eb\") " pod="openshift-marketplace/community-operators-fjmbb" Jan 26 00:23:45 crc kubenswrapper[4975]: I0126 00:23:45.929133 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddf952bd-6316-4782-9c91-9744c77eb1eb-catalog-content\") pod \"community-operators-fjmbb\" (UID: \"ddf952bd-6316-4782-9c91-9744c77eb1eb\") " pod="openshift-marketplace/community-operators-fjmbb" Jan 26 00:23:45 crc kubenswrapper[4975]: I0126 00:23:45.955971 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8lmlr\" (UniqueName: \"kubernetes.io/projected/ddf952bd-6316-4782-9c91-9744c77eb1eb-kube-api-access-8lmlr\") pod \"community-operators-fjmbb\" (UID: \"ddf952bd-6316-4782-9c91-9744c77eb1eb\") " pod="openshift-marketplace/community-operators-fjmbb" Jan 26 00:23:45 crc kubenswrapper[4975]: I0126 00:23:45.974136 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fjmbb" Jan 26 00:23:46 crc kubenswrapper[4975]: I0126 00:23:46.222673 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fjmbb"] Jan 26 00:23:46 crc kubenswrapper[4975]: I0126 00:23:46.320130 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjmbb" event={"ID":"ddf952bd-6316-4782-9c91-9744c77eb1eb","Type":"ContainerStarted","Data":"31b69902b7b4069b787b41eb256dd712f567c01c874d2c20739ecdc371cb455b"} Jan 26 00:23:47 crc kubenswrapper[4975]: I0126 00:23:47.326713 4975 generic.go:334] "Generic (PLEG): container finished" podID="ddf952bd-6316-4782-9c91-9744c77eb1eb" containerID="672b538fd7390a72b99f03ca7d1429378f188e82cab6dad52701be1961825c56" exitCode=0 Jan 26 00:23:47 crc kubenswrapper[4975]: I0126 00:23:47.326812 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjmbb" event={"ID":"ddf952bd-6316-4782-9c91-9744c77eb1eb","Type":"ContainerDied","Data":"672b538fd7390a72b99f03ca7d1429378f188e82cab6dad52701be1961825c56"} Jan 26 00:23:49 crc kubenswrapper[4975]: I0126 00:23:49.342606 4975 generic.go:334] "Generic (PLEG): container finished" podID="ddf952bd-6316-4782-9c91-9744c77eb1eb" containerID="3ba2ae888fb05150ab37db16aeabf94c5976e38889b7dfd1ef6ece3c96a21571" exitCode=0 Jan 26 00:23:49 crc kubenswrapper[4975]: I0126 00:23:49.342666 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjmbb" event={"ID":"ddf952bd-6316-4782-9c91-9744c77eb1eb","Type":"ContainerDied","Data":"3ba2ae888fb05150ab37db16aeabf94c5976e38889b7dfd1ef6ece3c96a21571"} Jan 26 00:23:50 crc kubenswrapper[4975]: I0126 00:23:50.350640 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjmbb" event={"ID":"ddf952bd-6316-4782-9c91-9744c77eb1eb","Type":"ContainerStarted","Data":"0c1860e2f5cec7df9911203dfe0eecd9d90092d752eda75973c4f41fe8c68bb1"} Jan 26 00:23:50 crc kubenswrapper[4975]: I0126 00:23:50.371577 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fjmbb" podStartSLOduration=2.955510574 podStartE2EDuration="5.371557382s" podCreationTimestamp="2026-01-26 00:23:45 +0000 UTC" firstStartedPulling="2026-01-26 00:23:47.329641541 +0000 UTC m=+1011.450847035" lastFinishedPulling="2026-01-26 00:23:49.745688349 +0000 UTC m=+1013.866893843" observedRunningTime="2026-01-26 00:23:50.366612973 +0000 UTC m=+1014.487818477" watchObservedRunningTime="2026-01-26 00:23:50.371557382 +0000 UTC m=+1014.492762876" Jan 26 00:23:55 crc kubenswrapper[4975]: I0126 00:23:55.077234 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t"] Jan 26 00:23:55 crc kubenswrapper[4975]: I0126 00:23:55.078787 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t" Jan 26 00:23:55 crc kubenswrapper[4975]: I0126 00:23:55.093280 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t"] Jan 26 00:23:55 crc kubenswrapper[4975]: I0126 00:23:55.179424 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4ee5c0d8-982d-4529-b84f-82ee182a3007-util\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t\" (UID: \"4ee5c0d8-982d-4529-b84f-82ee182a3007\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t" Jan 26 00:23:55 crc kubenswrapper[4975]: I0126 00:23:55.179544 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4ee5c0d8-982d-4529-b84f-82ee182a3007-bundle\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t\" (UID: \"4ee5c0d8-982d-4529-b84f-82ee182a3007\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t" Jan 26 00:23:55 crc kubenswrapper[4975]: I0126 00:23:55.179576 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4td2\" (UniqueName: \"kubernetes.io/projected/4ee5c0d8-982d-4529-b84f-82ee182a3007-kube-api-access-t4td2\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t\" (UID: \"4ee5c0d8-982d-4529-b84f-82ee182a3007\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t" Jan 26 00:23:55 crc kubenswrapper[4975]: I0126 00:23:55.281196 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4ee5c0d8-982d-4529-b84f-82ee182a3007-util\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t\" (UID: \"4ee5c0d8-982d-4529-b84f-82ee182a3007\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t" Jan 26 00:23:55 crc kubenswrapper[4975]: I0126 00:23:55.281371 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4ee5c0d8-982d-4529-b84f-82ee182a3007-bundle\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t\" (UID: \"4ee5c0d8-982d-4529-b84f-82ee182a3007\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t" Jan 26 00:23:55 crc kubenswrapper[4975]: I0126 00:23:55.281420 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4td2\" (UniqueName: \"kubernetes.io/projected/4ee5c0d8-982d-4529-b84f-82ee182a3007-kube-api-access-t4td2\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t\" (UID: \"4ee5c0d8-982d-4529-b84f-82ee182a3007\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t" Jan 26 00:23:55 crc kubenswrapper[4975]: I0126 00:23:55.281802 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4ee5c0d8-982d-4529-b84f-82ee182a3007-util\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t\" (UID: \"4ee5c0d8-982d-4529-b84f-82ee182a3007\") " 
pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t" Jan 26 00:23:55 crc kubenswrapper[4975]: I0126 00:23:55.281821 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4ee5c0d8-982d-4529-b84f-82ee182a3007-bundle\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t\" (UID: \"4ee5c0d8-982d-4529-b84f-82ee182a3007\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t" Jan 26 00:23:55 crc kubenswrapper[4975]: I0126 00:23:55.304879 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4td2\" (UniqueName: \"kubernetes.io/projected/4ee5c0d8-982d-4529-b84f-82ee182a3007-kube-api-access-t4td2\") pod \"27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t\" (UID: \"4ee5c0d8-982d-4529-b84f-82ee182a3007\") " pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t" Jan 26 00:23:55 crc kubenswrapper[4975]: I0126 00:23:55.393458 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t" Jan 26 00:23:55 crc kubenswrapper[4975]: I0126 00:23:55.804626 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t"] Jan 26 00:23:55 crc kubenswrapper[4975]: W0126 00:23:55.810930 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4ee5c0d8_982d_4529_b84f_82ee182a3007.slice/crio-e39d967d5fcf8f635a6188e9d46e80eab00dd3f6f02a183f03383c2070bb10d5 WatchSource:0}: Error finding container e39d967d5fcf8f635a6188e9d46e80eab00dd3f6f02a183f03383c2070bb10d5: Status 404 returned error can't find the container with id e39d967d5fcf8f635a6188e9d46e80eab00dd3f6f02a183f03383c2070bb10d5 Jan 26 00:23:55 crc kubenswrapper[4975]: I0126 00:23:55.891851 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495"] Jan 26 00:23:55 crc kubenswrapper[4975]: I0126 00:23:55.893436 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495" Jan 26 00:23:55 crc kubenswrapper[4975]: I0126 00:23:55.898239 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 26 00:23:55 crc kubenswrapper[4975]: I0126 00:23:55.898534 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495"] Jan 26 00:23:55 crc kubenswrapper[4975]: I0126 00:23:55.975121 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fjmbb" Jan 26 00:23:55 crc kubenswrapper[4975]: I0126 00:23:55.975186 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fjmbb" Jan 26 00:23:56 crc kubenswrapper[4975]: I0126 00:23:56.002936 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/56e34236-a062-4605-8a38-53bb6c213c74-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495\" (UID: \"56e34236-a062-4605-8a38-53bb6c213c74\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495" Jan 26 00:23:56 crc kubenswrapper[4975]: I0126 00:23:56.003014 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/56e34236-a062-4605-8a38-53bb6c213c74-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495\" (UID: \"56e34236-a062-4605-8a38-53bb6c213c74\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495" Jan 26 00:23:56 crc kubenswrapper[4975]: I0126 00:23:56.003493 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgq5l\" (UniqueName: \"kubernetes.io/projected/56e34236-a062-4605-8a38-53bb6c213c74-kube-api-access-mgq5l\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495\" (UID: \"56e34236-a062-4605-8a38-53bb6c213c74\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495" Jan 26 00:23:56 crc kubenswrapper[4975]: I0126 00:23:56.023623 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fjmbb" Jan 26 00:23:56 crc kubenswrapper[4975]: I0126 00:23:56.104947 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/56e34236-a062-4605-8a38-53bb6c213c74-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495\" (UID: \"56e34236-a062-4605-8a38-53bb6c213c74\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495" Jan 26 00:23:56 crc kubenswrapper[4975]: I0126 00:23:56.105037 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgq5l\" (UniqueName: \"kubernetes.io/projected/56e34236-a062-4605-8a38-53bb6c213c74-kube-api-access-mgq5l\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495\" (UID: \"56e34236-a062-4605-8a38-53bb6c213c74\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495" Jan 26 00:23:56 crc kubenswrapper[4975]: I0126 00:23:56.105113 4975 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/56e34236-a062-4605-8a38-53bb6c213c74-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495\" (UID: \"56e34236-a062-4605-8a38-53bb6c213c74\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495" Jan 26 00:23:56 crc kubenswrapper[4975]: I0126 00:23:56.105611 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/56e34236-a062-4605-8a38-53bb6c213c74-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495\" (UID: \"56e34236-a062-4605-8a38-53bb6c213c74\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495" Jan 26 00:23:56 crc kubenswrapper[4975]: I0126 00:23:56.105675 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/56e34236-a062-4605-8a38-53bb6c213c74-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495\" (UID: \"56e34236-a062-4605-8a38-53bb6c213c74\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495" Jan 26 00:23:56 crc kubenswrapper[4975]: I0126 00:23:56.124839 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgq5l\" (UniqueName: \"kubernetes.io/projected/56e34236-a062-4605-8a38-53bb6c213c74-kube-api-access-mgq5l\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495\" (UID: \"56e34236-a062-4605-8a38-53bb6c213c74\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495" Jan 26 00:23:56 crc kubenswrapper[4975]: I0126 00:23:56.218181 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 26 00:23:56 crc kubenswrapper[4975]: I0126 00:23:56.226968 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495" Jan 26 00:23:56 crc kubenswrapper[4975]: I0126 00:23:56.402915 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t" event={"ID":"4ee5c0d8-982d-4529-b84f-82ee182a3007","Type":"ContainerStarted","Data":"e39d967d5fcf8f635a6188e9d46e80eab00dd3f6f02a183f03383c2070bb10d5"} Jan 26 00:23:56 crc kubenswrapper[4975]: I0126 00:23:56.450162 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fjmbb" Jan 26 00:23:56 crc kubenswrapper[4975]: I0126 00:23:56.671747 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495"] Jan 26 00:23:56 crc kubenswrapper[4975]: W0126 00:23:56.682930 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod56e34236_a062_4605_8a38_53bb6c213c74.slice/crio-64e992869789ca68d000cf6eaf24c71d117ff61dcef16bc2f71420526c94fbfe WatchSource:0}: Error finding container 64e992869789ca68d000cf6eaf24c71d117ff61dcef16bc2f71420526c94fbfe: Status 404 returned error can't find the container with id 64e992869789ca68d000cf6eaf24c71d117ff61dcef16bc2f71420526c94fbfe Jan 26 00:23:56 crc kubenswrapper[4975]: I0126 00:23:56.888006 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz"] Jan 26 00:23:56 crc kubenswrapper[4975]: I0126 00:23:56.892267 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz" Jan 26 00:23:56 crc kubenswrapper[4975]: I0126 00:23:56.895688 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz"] Jan 26 00:23:57 crc kubenswrapper[4975]: I0126 00:23:57.018090 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hn5dg\" (UniqueName: \"kubernetes.io/projected/be75c85e-6aa9-41e2-a9aa-03538f1e61b6-kube-api-access-hn5dg\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz\" (UID: \"be75c85e-6aa9-41e2-a9aa-03538f1e61b6\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz" Jan 26 00:23:57 crc kubenswrapper[4975]: I0126 00:23:57.018156 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/be75c85e-6aa9-41e2-a9aa-03538f1e61b6-bundle\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz\" (UID: \"be75c85e-6aa9-41e2-a9aa-03538f1e61b6\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz" Jan 26 00:23:57 crc kubenswrapper[4975]: I0126 00:23:57.018187 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/be75c85e-6aa9-41e2-a9aa-03538f1e61b6-util\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz\" (UID: \"be75c85e-6aa9-41e2-a9aa-03538f1e61b6\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz" Jan 26 00:23:57 crc kubenswrapper[4975]: I0126 00:23:57.119582 
4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hn5dg\" (UniqueName: \"kubernetes.io/projected/be75c85e-6aa9-41e2-a9aa-03538f1e61b6-kube-api-access-hn5dg\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz\" (UID: \"be75c85e-6aa9-41e2-a9aa-03538f1e61b6\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz" Jan 26 00:23:57 crc kubenswrapper[4975]: I0126 00:23:57.119641 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/be75c85e-6aa9-41e2-a9aa-03538f1e61b6-bundle\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz\" (UID: \"be75c85e-6aa9-41e2-a9aa-03538f1e61b6\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz" Jan 26 00:23:57 crc kubenswrapper[4975]: I0126 00:23:57.119664 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/be75c85e-6aa9-41e2-a9aa-03538f1e61b6-util\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz\" (UID: \"be75c85e-6aa9-41e2-a9aa-03538f1e61b6\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz" Jan 26 00:23:57 crc kubenswrapper[4975]: I0126 00:23:57.120469 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/be75c85e-6aa9-41e2-a9aa-03538f1e61b6-util\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz\" (UID: \"be75c85e-6aa9-41e2-a9aa-03538f1e61b6\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz" Jan 26 00:23:57 crc kubenswrapper[4975]: I0126 00:23:57.120602 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/be75c85e-6aa9-41e2-a9aa-03538f1e61b6-bundle\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz\" (UID: \"be75c85e-6aa9-41e2-a9aa-03538f1e61b6\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz" Jan 26 00:23:57 crc kubenswrapper[4975]: I0126 00:23:57.138200 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hn5dg\" (UniqueName: \"kubernetes.io/projected/be75c85e-6aa9-41e2-a9aa-03538f1e61b6-kube-api-access-hn5dg\") pod \"cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz\" (UID: \"be75c85e-6aa9-41e2-a9aa-03538f1e61b6\") " pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz" Jan 26 00:23:57 crc kubenswrapper[4975]: I0126 00:23:57.231317 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz" Jan 26 00:23:57 crc kubenswrapper[4975]: I0126 00:23:57.411457 4975 generic.go:334] "Generic (PLEG): container finished" podID="56e34236-a062-4605-8a38-53bb6c213c74" containerID="ee3b19c536690966a65e32a9026292a002fb7407b46d8f0694c6c37bc1bb212c" exitCode=0 Jan 26 00:23:57 crc kubenswrapper[4975]: I0126 00:23:57.411531 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495" event={"ID":"56e34236-a062-4605-8a38-53bb6c213c74","Type":"ContainerDied","Data":"ee3b19c536690966a65e32a9026292a002fb7407b46d8f0694c6c37bc1bb212c"} Jan 26 00:23:57 crc kubenswrapper[4975]: I0126 00:23:57.411566 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495" event={"ID":"56e34236-a062-4605-8a38-53bb6c213c74","Type":"ContainerStarted","Data":"64e992869789ca68d000cf6eaf24c71d117ff61dcef16bc2f71420526c94fbfe"} Jan 26 00:23:57 crc kubenswrapper[4975]: I0126 00:23:57.414940 4975 generic.go:334] "Generic (PLEG): container finished" podID="4ee5c0d8-982d-4529-b84f-82ee182a3007" containerID="61fc251ff5f30fe171942e02bfff26d2b36bd0e0e7636ebf9ebeadc293f7122f" exitCode=0 Jan 26 00:23:57 crc kubenswrapper[4975]: I0126 00:23:57.415371 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t" event={"ID":"4ee5c0d8-982d-4529-b84f-82ee182a3007","Type":"ContainerDied","Data":"61fc251ff5f30fe171942e02bfff26d2b36bd0e0e7636ebf9ebeadc293f7122f"} Jan 26 00:23:57 crc kubenswrapper[4975]: W0126 00:23:57.508998 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbe75c85e_6aa9_41e2_a9aa_03538f1e61b6.slice/crio-bb99cad0f2e2c3009df82bc1f9056ed82eac8b63f66c336a3cbb922709f2937e WatchSource:0}: Error finding container bb99cad0f2e2c3009df82bc1f9056ed82eac8b63f66c336a3cbb922709f2937e: Status 404 returned error can't find the container with id bb99cad0f2e2c3009df82bc1f9056ed82eac8b63f66c336a3cbb922709f2937e Jan 26 00:23:57 crc kubenswrapper[4975]: I0126 00:23:57.513538 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz"] Jan 26 00:23:58 crc kubenswrapper[4975]: I0126 00:23:58.422889 4975 generic.go:334] "Generic (PLEG): container finished" podID="4ee5c0d8-982d-4529-b84f-82ee182a3007" containerID="a4d0de26f03dd7c43a47ed5a323b9d3c8f23a72281715f0bb11e2dc6a4a8404d" exitCode=0 Jan 26 00:23:58 crc kubenswrapper[4975]: I0126 00:23:58.422928 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t" event={"ID":"4ee5c0d8-982d-4529-b84f-82ee182a3007","Type":"ContainerDied","Data":"a4d0de26f03dd7c43a47ed5a323b9d3c8f23a72281715f0bb11e2dc6a4a8404d"} Jan 26 00:23:58 crc kubenswrapper[4975]: I0126 00:23:58.424866 4975 generic.go:334] "Generic (PLEG): container finished" podID="be75c85e-6aa9-41e2-a9aa-03538f1e61b6" containerID="afbf7f8046e9bc893dcefeb6e03a98f3a71149756cf2b44545a26b3ef4208261" exitCode=0 Jan 26 00:23:58 crc kubenswrapper[4975]: I0126 00:23:58.424900 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz" 
event={"ID":"be75c85e-6aa9-41e2-a9aa-03538f1e61b6","Type":"ContainerDied","Data":"afbf7f8046e9bc893dcefeb6e03a98f3a71149756cf2b44545a26b3ef4208261"} Jan 26 00:23:58 crc kubenswrapper[4975]: I0126 00:23:58.424918 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz" event={"ID":"be75c85e-6aa9-41e2-a9aa-03538f1e61b6","Type":"ContainerStarted","Data":"bb99cad0f2e2c3009df82bc1f9056ed82eac8b63f66c336a3cbb922709f2937e"} Jan 26 00:23:59 crc kubenswrapper[4975]: I0126 00:23:59.439403 4975 generic.go:334] "Generic (PLEG): container finished" podID="be75c85e-6aa9-41e2-a9aa-03538f1e61b6" containerID="0d7c0b5fca14d1a3d1d8bb4652ab8c3ba0d5eb7c974938078b1eb8d3bbd19abb" exitCode=0 Jan 26 00:23:59 crc kubenswrapper[4975]: I0126 00:23:59.439493 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz" event={"ID":"be75c85e-6aa9-41e2-a9aa-03538f1e61b6","Type":"ContainerDied","Data":"0d7c0b5fca14d1a3d1d8bb4652ab8c3ba0d5eb7c974938078b1eb8d3bbd19abb"} Jan 26 00:23:59 crc kubenswrapper[4975]: I0126 00:23:59.443097 4975 generic.go:334] "Generic (PLEG): container finished" podID="56e34236-a062-4605-8a38-53bb6c213c74" containerID="6a7c28961aafe9fae621af280b2b09ba49dffaa2b4fccd61d7db850b66223f8a" exitCode=0 Jan 26 00:23:59 crc kubenswrapper[4975]: I0126 00:23:59.443182 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495" event={"ID":"56e34236-a062-4605-8a38-53bb6c213c74","Type":"ContainerDied","Data":"6a7c28961aafe9fae621af280b2b09ba49dffaa2b4fccd61d7db850b66223f8a"} Jan 26 00:23:59 crc kubenswrapper[4975]: I0126 00:23:59.446353 4975 generic.go:334] "Generic (PLEG): container finished" podID="4ee5c0d8-982d-4529-b84f-82ee182a3007" containerID="8199d1416fa80d552251ff2fc6d74bd4b4f2d0035b3b33b6c93e61c8d201ae09" exitCode=0 Jan 26 00:23:59 crc kubenswrapper[4975]: I0126 00:23:59.446389 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t" event={"ID":"4ee5c0d8-982d-4529-b84f-82ee182a3007","Type":"ContainerDied","Data":"8199d1416fa80d552251ff2fc6d74bd4b4f2d0035b3b33b6c93e61c8d201ae09"} Jan 26 00:24:00 crc kubenswrapper[4975]: I0126 00:24:00.455366 4975 generic.go:334] "Generic (PLEG): container finished" podID="be75c85e-6aa9-41e2-a9aa-03538f1e61b6" containerID="1eeab39803cba398d83560d57b616602e37183db4557cea405e066801c41f97b" exitCode=0 Jan 26 00:24:00 crc kubenswrapper[4975]: I0126 00:24:00.455468 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz" event={"ID":"be75c85e-6aa9-41e2-a9aa-03538f1e61b6","Type":"ContainerDied","Data":"1eeab39803cba398d83560d57b616602e37183db4557cea405e066801c41f97b"} Jan 26 00:24:00 crc kubenswrapper[4975]: I0126 00:24:00.457798 4975 generic.go:334] "Generic (PLEG): container finished" podID="56e34236-a062-4605-8a38-53bb6c213c74" containerID="c9425ee82f77d66b52064d950e9e71f78fbf22d44c06895161451c534b0d6c80" exitCode=0 Jan 26 00:24:00 crc kubenswrapper[4975]: I0126 00:24:00.457914 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495" 
event={"ID":"56e34236-a062-4605-8a38-53bb6c213c74","Type":"ContainerDied","Data":"c9425ee82f77d66b52064d950e9e71f78fbf22d44c06895161451c534b0d6c80"} Jan 26 00:24:00 crc kubenswrapper[4975]: I0126 00:24:00.713649 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t" Jan 26 00:24:00 crc kubenswrapper[4975]: I0126 00:24:00.876717 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4ee5c0d8-982d-4529-b84f-82ee182a3007-bundle\") pod \"4ee5c0d8-982d-4529-b84f-82ee182a3007\" (UID: \"4ee5c0d8-982d-4529-b84f-82ee182a3007\") " Jan 26 00:24:00 crc kubenswrapper[4975]: I0126 00:24:00.876827 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4ee5c0d8-982d-4529-b84f-82ee182a3007-util\") pod \"4ee5c0d8-982d-4529-b84f-82ee182a3007\" (UID: \"4ee5c0d8-982d-4529-b84f-82ee182a3007\") " Jan 26 00:24:00 crc kubenswrapper[4975]: I0126 00:24:00.877009 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t4td2\" (UniqueName: \"kubernetes.io/projected/4ee5c0d8-982d-4529-b84f-82ee182a3007-kube-api-access-t4td2\") pod \"4ee5c0d8-982d-4529-b84f-82ee182a3007\" (UID: \"4ee5c0d8-982d-4529-b84f-82ee182a3007\") " Jan 26 00:24:00 crc kubenswrapper[4975]: I0126 00:24:00.877441 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ee5c0d8-982d-4529-b84f-82ee182a3007-bundle" (OuterVolumeSpecName: "bundle") pod "4ee5c0d8-982d-4529-b84f-82ee182a3007" (UID: "4ee5c0d8-982d-4529-b84f-82ee182a3007"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:24:00 crc kubenswrapper[4975]: I0126 00:24:00.884118 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ee5c0d8-982d-4529-b84f-82ee182a3007-kube-api-access-t4td2" (OuterVolumeSpecName: "kube-api-access-t4td2") pod "4ee5c0d8-982d-4529-b84f-82ee182a3007" (UID: "4ee5c0d8-982d-4529-b84f-82ee182a3007"). InnerVolumeSpecName "kube-api-access-t4td2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:24:00 crc kubenswrapper[4975]: I0126 00:24:00.891536 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ee5c0d8-982d-4529-b84f-82ee182a3007-util" (OuterVolumeSpecName: "util") pod "4ee5c0d8-982d-4529-b84f-82ee182a3007" (UID: "4ee5c0d8-982d-4529-b84f-82ee182a3007"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:24:00 crc kubenswrapper[4975]: I0126 00:24:00.978431 4975 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4ee5c0d8-982d-4529-b84f-82ee182a3007-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 00:24:00 crc kubenswrapper[4975]: I0126 00:24:00.978466 4975 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4ee5c0d8-982d-4529-b84f-82ee182a3007-util\") on node \"crc\" DevicePath \"\"" Jan 26 00:24:00 crc kubenswrapper[4975]: I0126 00:24:00.978479 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t4td2\" (UniqueName: \"kubernetes.io/projected/4ee5c0d8-982d-4529-b84f-82ee182a3007-kube-api-access-t4td2\") on node \"crc\" DevicePath \"\"" Jan 26 00:24:01 crc kubenswrapper[4975]: I0126 00:24:01.467550 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t" event={"ID":"4ee5c0d8-982d-4529-b84f-82ee182a3007","Type":"ContainerDied","Data":"e39d967d5fcf8f635a6188e9d46e80eab00dd3f6f02a183f03383c2070bb10d5"} Jan 26 00:24:01 crc kubenswrapper[4975]: I0126 00:24:01.467630 4975 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e39d967d5fcf8f635a6188e9d46e80eab00dd3f6f02a183f03383c2070bb10d5" Jan 26 00:24:01 crc kubenswrapper[4975]: I0126 00:24:01.467778 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/27e4a3b82b847aaaac340f98fd9ec51c99f28242b589c6c251a26fbc3b4vt6t" Jan 26 00:24:01 crc kubenswrapper[4975]: I0126 00:24:01.741956 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz" Jan 26 00:24:01 crc kubenswrapper[4975]: I0126 00:24:01.746996 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495" Jan 26 00:24:01 crc kubenswrapper[4975]: I0126 00:24:01.890463 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgq5l\" (UniqueName: \"kubernetes.io/projected/56e34236-a062-4605-8a38-53bb6c213c74-kube-api-access-mgq5l\") pod \"56e34236-a062-4605-8a38-53bb6c213c74\" (UID: \"56e34236-a062-4605-8a38-53bb6c213c74\") " Jan 26 00:24:01 crc kubenswrapper[4975]: I0126 00:24:01.890570 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/56e34236-a062-4605-8a38-53bb6c213c74-util\") pod \"56e34236-a062-4605-8a38-53bb6c213c74\" (UID: \"56e34236-a062-4605-8a38-53bb6c213c74\") " Jan 26 00:24:01 crc kubenswrapper[4975]: I0126 00:24:01.890622 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hn5dg\" (UniqueName: \"kubernetes.io/projected/be75c85e-6aa9-41e2-a9aa-03538f1e61b6-kube-api-access-hn5dg\") pod \"be75c85e-6aa9-41e2-a9aa-03538f1e61b6\" (UID: \"be75c85e-6aa9-41e2-a9aa-03538f1e61b6\") " Jan 26 00:24:01 crc kubenswrapper[4975]: I0126 00:24:01.890686 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/be75c85e-6aa9-41e2-a9aa-03538f1e61b6-util\") pod \"be75c85e-6aa9-41e2-a9aa-03538f1e61b6\" (UID: \"be75c85e-6aa9-41e2-a9aa-03538f1e61b6\") " Jan 26 00:24:01 crc kubenswrapper[4975]: I0126 00:24:01.890760 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/be75c85e-6aa9-41e2-a9aa-03538f1e61b6-bundle\") pod \"be75c85e-6aa9-41e2-a9aa-03538f1e61b6\" (UID: \"be75c85e-6aa9-41e2-a9aa-03538f1e61b6\") " Jan 26 00:24:01 crc kubenswrapper[4975]: I0126 00:24:01.890804 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/56e34236-a062-4605-8a38-53bb6c213c74-bundle\") pod \"56e34236-a062-4605-8a38-53bb6c213c74\" (UID: \"56e34236-a062-4605-8a38-53bb6c213c74\") " Jan 26 00:24:01 crc kubenswrapper[4975]: I0126 00:24:01.891932 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56e34236-a062-4605-8a38-53bb6c213c74-bundle" (OuterVolumeSpecName: "bundle") pod "56e34236-a062-4605-8a38-53bb6c213c74" (UID: "56e34236-a062-4605-8a38-53bb6c213c74"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:24:01 crc kubenswrapper[4975]: I0126 00:24:01.892272 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be75c85e-6aa9-41e2-a9aa-03538f1e61b6-bundle" (OuterVolumeSpecName: "bundle") pod "be75c85e-6aa9-41e2-a9aa-03538f1e61b6" (UID: "be75c85e-6aa9-41e2-a9aa-03538f1e61b6"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:24:01 crc kubenswrapper[4975]: I0126 00:24:01.894308 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be75c85e-6aa9-41e2-a9aa-03538f1e61b6-kube-api-access-hn5dg" (OuterVolumeSpecName: "kube-api-access-hn5dg") pod "be75c85e-6aa9-41e2-a9aa-03538f1e61b6" (UID: "be75c85e-6aa9-41e2-a9aa-03538f1e61b6"). InnerVolumeSpecName "kube-api-access-hn5dg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:24:01 crc kubenswrapper[4975]: I0126 00:24:01.894394 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56e34236-a062-4605-8a38-53bb6c213c74-kube-api-access-mgq5l" (OuterVolumeSpecName: "kube-api-access-mgq5l") pod "56e34236-a062-4605-8a38-53bb6c213c74" (UID: "56e34236-a062-4605-8a38-53bb6c213c74"). InnerVolumeSpecName "kube-api-access-mgq5l". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:24:01 crc kubenswrapper[4975]: I0126 00:24:01.905375 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56e34236-a062-4605-8a38-53bb6c213c74-util" (OuterVolumeSpecName: "util") pod "56e34236-a062-4605-8a38-53bb6c213c74" (UID: "56e34236-a062-4605-8a38-53bb6c213c74"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:24:01 crc kubenswrapper[4975]: I0126 00:24:01.907966 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be75c85e-6aa9-41e2-a9aa-03538f1e61b6-util" (OuterVolumeSpecName: "util") pod "be75c85e-6aa9-41e2-a9aa-03538f1e61b6" (UID: "be75c85e-6aa9-41e2-a9aa-03538f1e61b6"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:24:01 crc kubenswrapper[4975]: I0126 00:24:01.992077 4975 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/56e34236-a062-4605-8a38-53bb6c213c74-util\") on node \"crc\" DevicePath \"\"" Jan 26 00:24:01 crc kubenswrapper[4975]: I0126 00:24:01.992404 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hn5dg\" (UniqueName: \"kubernetes.io/projected/be75c85e-6aa9-41e2-a9aa-03538f1e61b6-kube-api-access-hn5dg\") on node \"crc\" DevicePath \"\"" Jan 26 00:24:01 crc kubenswrapper[4975]: I0126 00:24:01.992418 4975 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/be75c85e-6aa9-41e2-a9aa-03538f1e61b6-util\") on node \"crc\" DevicePath \"\"" Jan 26 00:24:01 crc kubenswrapper[4975]: I0126 00:24:01.992428 4975 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/be75c85e-6aa9-41e2-a9aa-03538f1e61b6-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 00:24:01 crc kubenswrapper[4975]: I0126 00:24:01.992437 4975 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/56e34236-a062-4605-8a38-53bb6c213c74-bundle\") on node \"crc\" DevicePath \"\"" Jan 26 00:24:01 crc kubenswrapper[4975]: I0126 00:24:01.992446 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgq5l\" (UniqueName: \"kubernetes.io/projected/56e34236-a062-4605-8a38-53bb6c213c74-kube-api-access-mgq5l\") on node \"crc\" DevicePath \"\"" Jan 26 00:24:02 crc kubenswrapper[4975]: I0126 00:24:02.476343 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz" event={"ID":"be75c85e-6aa9-41e2-a9aa-03538f1e61b6","Type":"ContainerDied","Data":"bb99cad0f2e2c3009df82bc1f9056ed82eac8b63f66c336a3cbb922709f2937e"} Jan 26 00:24:02 crc kubenswrapper[4975]: I0126 00:24:02.476383 4975 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb99cad0f2e2c3009df82bc1f9056ed82eac8b63f66c336a3cbb922709f2937e" Jan 26 00:24:02 crc kubenswrapper[4975]: I0126 00:24:02.476409 4975 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/cd07ebce3b618aaffa8e106dab3e8eb93287fbb0e4c5a9c0f6ea8fc7ebzbfbz" Jan 26 00:24:02 crc kubenswrapper[4975]: I0126 00:24:02.478723 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495" event={"ID":"56e34236-a062-4605-8a38-53bb6c213c74","Type":"ContainerDied","Data":"64e992869789ca68d000cf6eaf24c71d117ff61dcef16bc2f71420526c94fbfe"} Jan 26 00:24:02 crc kubenswrapper[4975]: I0126 00:24:02.478772 4975 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="64e992869789ca68d000cf6eaf24c71d117ff61dcef16bc2f71420526c94fbfe" Jan 26 00:24:02 crc kubenswrapper[4975]: I0126 00:24:02.478828 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495" Jan 26 00:24:04 crc kubenswrapper[4975]: I0126 00:24:04.419782 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fjmbb"] Jan 26 00:24:04 crc kubenswrapper[4975]: I0126 00:24:04.420423 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-fjmbb" podUID="ddf952bd-6316-4782-9c91-9744c77eb1eb" containerName="registry-server" containerID="cri-o://0c1860e2f5cec7df9911203dfe0eecd9d90092d752eda75973c4f41fe8c68bb1" gracePeriod=2 Jan 26 00:24:05 crc kubenswrapper[4975]: E0126 00:24:05.975819 4975 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0c1860e2f5cec7df9911203dfe0eecd9d90092d752eda75973c4f41fe8c68bb1 is running failed: container process not found" containerID="0c1860e2f5cec7df9911203dfe0eecd9d90092d752eda75973c4f41fe8c68bb1" cmd=["grpc_health_probe","-addr=:50051"] Jan 26 00:24:05 crc kubenswrapper[4975]: E0126 00:24:05.976538 4975 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0c1860e2f5cec7df9911203dfe0eecd9d90092d752eda75973c4f41fe8c68bb1 is running failed: container process not found" containerID="0c1860e2f5cec7df9911203dfe0eecd9d90092d752eda75973c4f41fe8c68bb1" cmd=["grpc_health_probe","-addr=:50051"] Jan 26 00:24:05 crc kubenswrapper[4975]: E0126 00:24:05.977064 4975 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0c1860e2f5cec7df9911203dfe0eecd9d90092d752eda75973c4f41fe8c68bb1 is running failed: container process not found" containerID="0c1860e2f5cec7df9911203dfe0eecd9d90092d752eda75973c4f41fe8c68bb1" cmd=["grpc_health_probe","-addr=:50051"] Jan 26 00:24:05 crc kubenswrapper[4975]: E0126 00:24:05.977107 4975 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 0c1860e2f5cec7df9911203dfe0eecd9d90092d752eda75973c4f41fe8c68bb1 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-fjmbb" podUID="ddf952bd-6316-4782-9c91-9744c77eb1eb" containerName="registry-server" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.296450 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-4h6qb"] Jan 26 00:24:07 crc kubenswrapper[4975]: 
E0126 00:24:07.296717 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56e34236-a062-4605-8a38-53bb6c213c74" containerName="util" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.296743 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="56e34236-a062-4605-8a38-53bb6c213c74" containerName="util" Jan 26 00:24:07 crc kubenswrapper[4975]: E0126 00:24:07.296756 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ee5c0d8-982d-4529-b84f-82ee182a3007" containerName="pull" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.296762 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ee5c0d8-982d-4529-b84f-82ee182a3007" containerName="pull" Jan 26 00:24:07 crc kubenswrapper[4975]: E0126 00:24:07.296770 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be75c85e-6aa9-41e2-a9aa-03538f1e61b6" containerName="pull" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.296778 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="be75c85e-6aa9-41e2-a9aa-03538f1e61b6" containerName="pull" Jan 26 00:24:07 crc kubenswrapper[4975]: E0126 00:24:07.296792 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ee5c0d8-982d-4529-b84f-82ee182a3007" containerName="util" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.296798 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ee5c0d8-982d-4529-b84f-82ee182a3007" containerName="util" Jan 26 00:24:07 crc kubenswrapper[4975]: E0126 00:24:07.296805 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ee5c0d8-982d-4529-b84f-82ee182a3007" containerName="extract" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.296811 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ee5c0d8-982d-4529-b84f-82ee182a3007" containerName="extract" Jan 26 00:24:07 crc kubenswrapper[4975]: E0126 00:24:07.296819 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be75c85e-6aa9-41e2-a9aa-03538f1e61b6" containerName="util" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.296826 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="be75c85e-6aa9-41e2-a9aa-03538f1e61b6" containerName="util" Jan 26 00:24:07 crc kubenswrapper[4975]: E0126 00:24:07.296838 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56e34236-a062-4605-8a38-53bb6c213c74" containerName="pull" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.296844 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="56e34236-a062-4605-8a38-53bb6c213c74" containerName="pull" Jan 26 00:24:07 crc kubenswrapper[4975]: E0126 00:24:07.296851 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56e34236-a062-4605-8a38-53bb6c213c74" containerName="extract" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.296857 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="56e34236-a062-4605-8a38-53bb6c213c74" containerName="extract" Jan 26 00:24:07 crc kubenswrapper[4975]: E0126 00:24:07.296867 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be75c85e-6aa9-41e2-a9aa-03538f1e61b6" containerName="extract" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.296873 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="be75c85e-6aa9-41e2-a9aa-03538f1e61b6" containerName="extract" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.296972 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ee5c0d8-982d-4529-b84f-82ee182a3007" containerName="extract" Jan 26 
00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.296986 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="be75c85e-6aa9-41e2-a9aa-03538f1e61b6" containerName="extract" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.296995 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="56e34236-a062-4605-8a38-53bb6c213c74" containerName="extract" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.297476 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/interconnect-operator-5bb49f789d-4h6qb" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.301926 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"interconnect-operator-dockercfg-xs229" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.321481 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-4h6qb"] Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.368936 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhspg\" (UniqueName: \"kubernetes.io/projected/1b40081a-fe71-4f49-9b40-15b0ac5c9a27-kube-api-access-xhspg\") pod \"interconnect-operator-5bb49f789d-4h6qb\" (UID: \"1b40081a-fe71-4f49-9b40-15b0ac5c9a27\") " pod="service-telemetry/interconnect-operator-5bb49f789d-4h6qb" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.470513 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhspg\" (UniqueName: \"kubernetes.io/projected/1b40081a-fe71-4f49-9b40-15b0ac5c9a27-kube-api-access-xhspg\") pod \"interconnect-operator-5bb49f789d-4h6qb\" (UID: \"1b40081a-fe71-4f49-9b40-15b0ac5c9a27\") " pod="service-telemetry/interconnect-operator-5bb49f789d-4h6qb" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.489583 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhspg\" (UniqueName: \"kubernetes.io/projected/1b40081a-fe71-4f49-9b40-15b0ac5c9a27-kube-api-access-xhspg\") pod \"interconnect-operator-5bb49f789d-4h6qb\" (UID: \"1b40081a-fe71-4f49-9b40-15b0ac5c9a27\") " pod="service-telemetry/interconnect-operator-5bb49f789d-4h6qb" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.516974 4975 generic.go:334] "Generic (PLEG): container finished" podID="ddf952bd-6316-4782-9c91-9744c77eb1eb" containerID="0c1860e2f5cec7df9911203dfe0eecd9d90092d752eda75973c4f41fe8c68bb1" exitCode=0 Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.517027 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjmbb" event={"ID":"ddf952bd-6316-4782-9c91-9744c77eb1eb","Type":"ContainerDied","Data":"0c1860e2f5cec7df9911203dfe0eecd9d90092d752eda75973c4f41fe8c68bb1"} Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.555859 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fjmbb" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.572222 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddf952bd-6316-4782-9c91-9744c77eb1eb-utilities\") pod \"ddf952bd-6316-4782-9c91-9744c77eb1eb\" (UID: \"ddf952bd-6316-4782-9c91-9744c77eb1eb\") " Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.572260 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddf952bd-6316-4782-9c91-9744c77eb1eb-catalog-content\") pod \"ddf952bd-6316-4782-9c91-9744c77eb1eb\" (UID: \"ddf952bd-6316-4782-9c91-9744c77eb1eb\") " Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.572343 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8lmlr\" (UniqueName: \"kubernetes.io/projected/ddf952bd-6316-4782-9c91-9744c77eb1eb-kube-api-access-8lmlr\") pod \"ddf952bd-6316-4782-9c91-9744c77eb1eb\" (UID: \"ddf952bd-6316-4782-9c91-9744c77eb1eb\") " Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.574887 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ddf952bd-6316-4782-9c91-9744c77eb1eb-utilities" (OuterVolumeSpecName: "utilities") pod "ddf952bd-6316-4782-9c91-9744c77eb1eb" (UID: "ddf952bd-6316-4782-9c91-9744c77eb1eb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.578560 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddf952bd-6316-4782-9c91-9744c77eb1eb-kube-api-access-8lmlr" (OuterVolumeSpecName: "kube-api-access-8lmlr") pod "ddf952bd-6316-4782-9c91-9744c77eb1eb" (UID: "ddf952bd-6316-4782-9c91-9744c77eb1eb"). InnerVolumeSpecName "kube-api-access-8lmlr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.618157 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/interconnect-operator-5bb49f789d-4h6qb" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.628519 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ddf952bd-6316-4782-9c91-9744c77eb1eb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ddf952bd-6316-4782-9c91-9744c77eb1eb" (UID: "ddf952bd-6316-4782-9c91-9744c77eb1eb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.673705 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8lmlr\" (UniqueName: \"kubernetes.io/projected/ddf952bd-6316-4782-9c91-9744c77eb1eb-kube-api-access-8lmlr\") on node \"crc\" DevicePath \"\"" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.673766 4975 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ddf952bd-6316-4782-9c91-9744c77eb1eb-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.673782 4975 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ddf952bd-6316-4782-9c91-9744c77eb1eb-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 00:24:07 crc kubenswrapper[4975]: I0126 00:24:07.852390 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-4h6qb"] Jan 26 00:24:08 crc kubenswrapper[4975]: I0126 00:24:08.532836 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fjmbb" Jan 26 00:24:08 crc kubenswrapper[4975]: I0126 00:24:08.533009 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fjmbb" event={"ID":"ddf952bd-6316-4782-9c91-9744c77eb1eb","Type":"ContainerDied","Data":"31b69902b7b4069b787b41eb256dd712f567c01c874d2c20739ecdc371cb455b"} Jan 26 00:24:08 crc kubenswrapper[4975]: I0126 00:24:08.533138 4975 scope.go:117] "RemoveContainer" containerID="0c1860e2f5cec7df9911203dfe0eecd9d90092d752eda75973c4f41fe8c68bb1" Jan 26 00:24:08 crc kubenswrapper[4975]: I0126 00:24:08.535360 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/interconnect-operator-5bb49f789d-4h6qb" event={"ID":"1b40081a-fe71-4f49-9b40-15b0ac5c9a27","Type":"ContainerStarted","Data":"68e29fb2fce1c4c64d26543afb4faee32352f40dcf93220bbc5058c5bba4aec2"} Jan 26 00:24:08 crc kubenswrapper[4975]: I0126 00:24:08.564912 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-fjmbb"] Jan 26 00:24:08 crc kubenswrapper[4975]: I0126 00:24:08.568382 4975 scope.go:117] "RemoveContainer" containerID="3ba2ae888fb05150ab37db16aeabf94c5976e38889b7dfd1ef6ece3c96a21571" Jan 26 00:24:08 crc kubenswrapper[4975]: I0126 00:24:08.571604 4975 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-fjmbb"] Jan 26 00:24:08 crc kubenswrapper[4975]: I0126 00:24:08.587424 4975 scope.go:117] "RemoveContainer" containerID="672b538fd7390a72b99f03ca7d1429378f188e82cab6dad52701be1961825c56" Jan 26 00:24:08 crc kubenswrapper[4975]: I0126 00:24:08.964992 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/smart-gateway-operator-bbbc889bc-4bswt"] Jan 26 00:24:08 crc kubenswrapper[4975]: E0126 00:24:08.965616 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddf952bd-6316-4782-9c91-9744c77eb1eb" containerName="extract-content" Jan 26 00:24:08 crc kubenswrapper[4975]: I0126 00:24:08.965640 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddf952bd-6316-4782-9c91-9744c77eb1eb" containerName="extract-content" Jan 26 00:24:08 crc kubenswrapper[4975]: E0126 00:24:08.965656 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddf952bd-6316-4782-9c91-9744c77eb1eb" 
containerName="registry-server" Jan 26 00:24:08 crc kubenswrapper[4975]: I0126 00:24:08.978756 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddf952bd-6316-4782-9c91-9744c77eb1eb" containerName="registry-server" Jan 26 00:24:08 crc kubenswrapper[4975]: E0126 00:24:08.978852 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddf952bd-6316-4782-9c91-9744c77eb1eb" containerName="extract-utilities" Jan 26 00:24:08 crc kubenswrapper[4975]: I0126 00:24:08.978868 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddf952bd-6316-4782-9c91-9744c77eb1eb" containerName="extract-utilities" Jan 26 00:24:08 crc kubenswrapper[4975]: I0126 00:24:08.979188 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddf952bd-6316-4782-9c91-9744c77eb1eb" containerName="registry-server" Jan 26 00:24:08 crc kubenswrapper[4975]: I0126 00:24:08.979842 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-bbbc889bc-4bswt" Jan 26 00:24:08 crc kubenswrapper[4975]: I0126 00:24:08.984408 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-operator-dockercfg-snccw" Jan 26 00:24:08 crc kubenswrapper[4975]: I0126 00:24:08.989053 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-bbbc889bc-4bswt"] Jan 26 00:24:09 crc kubenswrapper[4975]: I0126 00:24:09.118554 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/96b49c24-2f17-4321-a1ad-60db89870c43-runner\") pod \"smart-gateway-operator-bbbc889bc-4bswt\" (UID: \"96b49c24-2f17-4321-a1ad-60db89870c43\") " pod="service-telemetry/smart-gateway-operator-bbbc889bc-4bswt" Jan 26 00:24:09 crc kubenswrapper[4975]: I0126 00:24:09.119007 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvdh8\" (UniqueName: \"kubernetes.io/projected/96b49c24-2f17-4321-a1ad-60db89870c43-kube-api-access-nvdh8\") pod \"smart-gateway-operator-bbbc889bc-4bswt\" (UID: \"96b49c24-2f17-4321-a1ad-60db89870c43\") " pod="service-telemetry/smart-gateway-operator-bbbc889bc-4bswt" Jan 26 00:24:09 crc kubenswrapper[4975]: I0126 00:24:09.220602 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/96b49c24-2f17-4321-a1ad-60db89870c43-runner\") pod \"smart-gateway-operator-bbbc889bc-4bswt\" (UID: \"96b49c24-2f17-4321-a1ad-60db89870c43\") " pod="service-telemetry/smart-gateway-operator-bbbc889bc-4bswt" Jan 26 00:24:09 crc kubenswrapper[4975]: I0126 00:24:09.220653 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvdh8\" (UniqueName: \"kubernetes.io/projected/96b49c24-2f17-4321-a1ad-60db89870c43-kube-api-access-nvdh8\") pod \"smart-gateway-operator-bbbc889bc-4bswt\" (UID: \"96b49c24-2f17-4321-a1ad-60db89870c43\") " pod="service-telemetry/smart-gateway-operator-bbbc889bc-4bswt" Jan 26 00:24:09 crc kubenswrapper[4975]: I0126 00:24:09.221437 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/96b49c24-2f17-4321-a1ad-60db89870c43-runner\") pod \"smart-gateway-operator-bbbc889bc-4bswt\" (UID: \"96b49c24-2f17-4321-a1ad-60db89870c43\") " pod="service-telemetry/smart-gateway-operator-bbbc889bc-4bswt" Jan 26 00:24:09 crc kubenswrapper[4975]: I0126 
00:24:09.239254 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvdh8\" (UniqueName: \"kubernetes.io/projected/96b49c24-2f17-4321-a1ad-60db89870c43-kube-api-access-nvdh8\") pod \"smart-gateway-operator-bbbc889bc-4bswt\" (UID: \"96b49c24-2f17-4321-a1ad-60db89870c43\") " pod="service-telemetry/smart-gateway-operator-bbbc889bc-4bswt" Jan 26 00:24:09 crc kubenswrapper[4975]: I0126 00:24:09.297030 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-bbbc889bc-4bswt" Jan 26 00:24:09 crc kubenswrapper[4975]: I0126 00:24:09.536780 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-bbbc889bc-4bswt"] Jan 26 00:24:09 crc kubenswrapper[4975]: W0126 00:24:09.547375 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96b49c24_2f17_4321_a1ad_60db89870c43.slice/crio-fe8f42203d358cc9b0118ee95d00b3084558f6d5e8236d1468c9bbf96a48a126 WatchSource:0}: Error finding container fe8f42203d358cc9b0118ee95d00b3084558f6d5e8236d1468c9bbf96a48a126: Status 404 returned error can't find the container with id fe8f42203d358cc9b0118ee95d00b3084558f6d5e8236d1468c9bbf96a48a126 Jan 26 00:24:09 crc kubenswrapper[4975]: I0126 00:24:09.938895 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-55b89ddfb9-zxp9z"] Jan 26 00:24:09 crc kubenswrapper[4975]: I0126 00:24:09.939868 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-55b89ddfb9-zxp9z" Jan 26 00:24:09 crc kubenswrapper[4975]: I0126 00:24:09.944107 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"service-telemetry-operator-dockercfg-bwbd6" Jan 26 00:24:09 crc kubenswrapper[4975]: I0126 00:24:09.961618 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-55b89ddfb9-zxp9z"] Jan 26 00:24:10 crc kubenswrapper[4975]: I0126 00:24:10.133040 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvm5f\" (UniqueName: \"kubernetes.io/projected/9625ef50-fe72-40a4-bd92-c3c714a02ac7-kube-api-access-qvm5f\") pod \"service-telemetry-operator-55b89ddfb9-zxp9z\" (UID: \"9625ef50-fe72-40a4-bd92-c3c714a02ac7\") " pod="service-telemetry/service-telemetry-operator-55b89ddfb9-zxp9z" Jan 26 00:24:10 crc kubenswrapper[4975]: I0126 00:24:10.133237 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/9625ef50-fe72-40a4-bd92-c3c714a02ac7-runner\") pod \"service-telemetry-operator-55b89ddfb9-zxp9z\" (UID: \"9625ef50-fe72-40a4-bd92-c3c714a02ac7\") " pod="service-telemetry/service-telemetry-operator-55b89ddfb9-zxp9z" Jan 26 00:24:10 crc kubenswrapper[4975]: I0126 00:24:10.155041 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ddf952bd-6316-4782-9c91-9744c77eb1eb" path="/var/lib/kubelet/pods/ddf952bd-6316-4782-9c91-9744c77eb1eb/volumes" Jan 26 00:24:10 crc kubenswrapper[4975]: I0126 00:24:10.235778 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/9625ef50-fe72-40a4-bd92-c3c714a02ac7-runner\") pod \"service-telemetry-operator-55b89ddfb9-zxp9z\" (UID: \"9625ef50-fe72-40a4-bd92-c3c714a02ac7\") 
" pod="service-telemetry/service-telemetry-operator-55b89ddfb9-zxp9z" Jan 26 00:24:10 crc kubenswrapper[4975]: I0126 00:24:10.235850 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvm5f\" (UniqueName: \"kubernetes.io/projected/9625ef50-fe72-40a4-bd92-c3c714a02ac7-kube-api-access-qvm5f\") pod \"service-telemetry-operator-55b89ddfb9-zxp9z\" (UID: \"9625ef50-fe72-40a4-bd92-c3c714a02ac7\") " pod="service-telemetry/service-telemetry-operator-55b89ddfb9-zxp9z" Jan 26 00:24:10 crc kubenswrapper[4975]: I0126 00:24:10.236380 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/9625ef50-fe72-40a4-bd92-c3c714a02ac7-runner\") pod \"service-telemetry-operator-55b89ddfb9-zxp9z\" (UID: \"9625ef50-fe72-40a4-bd92-c3c714a02ac7\") " pod="service-telemetry/service-telemetry-operator-55b89ddfb9-zxp9z" Jan 26 00:24:10 crc kubenswrapper[4975]: I0126 00:24:10.257179 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvm5f\" (UniqueName: \"kubernetes.io/projected/9625ef50-fe72-40a4-bd92-c3c714a02ac7-kube-api-access-qvm5f\") pod \"service-telemetry-operator-55b89ddfb9-zxp9z\" (UID: \"9625ef50-fe72-40a4-bd92-c3c714a02ac7\") " pod="service-telemetry/service-telemetry-operator-55b89ddfb9-zxp9z" Jan 26 00:24:10 crc kubenswrapper[4975]: I0126 00:24:10.260794 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/service-telemetry-operator-55b89ddfb9-zxp9z" Jan 26 00:24:10 crc kubenswrapper[4975]: I0126 00:24:10.560239 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bbbc889bc-4bswt" event={"ID":"96b49c24-2f17-4321-a1ad-60db89870c43","Type":"ContainerStarted","Data":"fe8f42203d358cc9b0118ee95d00b3084558f6d5e8236d1468c9bbf96a48a126"} Jan 26 00:24:10 crc kubenswrapper[4975]: I0126 00:24:10.698828 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-55b89ddfb9-zxp9z"] Jan 26 00:24:10 crc kubenswrapper[4975]: W0126 00:24:10.718059 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9625ef50_fe72_40a4_bd92_c3c714a02ac7.slice/crio-4e01397e9a131480a7b700f11bda3a3103687ad367e49aef35c135f7fc9d77dc WatchSource:0}: Error finding container 4e01397e9a131480a7b700f11bda3a3103687ad367e49aef35c135f7fc9d77dc: Status 404 returned error can't find the container with id 4e01397e9a131480a7b700f11bda3a3103687ad367e49aef35c135f7fc9d77dc Jan 26 00:24:11 crc kubenswrapper[4975]: I0126 00:24:11.585065 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-55b89ddfb9-zxp9z" event={"ID":"9625ef50-fe72-40a4-bd92-c3c714a02ac7","Type":"ContainerStarted","Data":"4e01397e9a131480a7b700f11bda3a3103687ad367e49aef35c135f7fc9d77dc"} Jan 26 00:24:31 crc kubenswrapper[4975]: E0126 00:24:31.130104 4975 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/infrawatch/smart-gateway-operator:latest" Jan 26 00:24:31 crc kubenswrapper[4975]: E0126 00:24:31.130800 4975 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:operator,Image:quay.io/infrawatch/smart-gateway-operator:latest,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:WATCH_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.annotations['olm.targetNamespaces'],},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:OPERATOR_NAME,Value:smart-gateway-operator,ValueFrom:nil,},EnvVar{Name:ANSIBLE_GATHERING,Value:explicit,ValueFrom:nil,},EnvVar{Name:ANSIBLE_VERBOSITY_SMARTGATEWAY_SMARTGATEWAY_INFRA_WATCH,Value:4,ValueFrom:nil,},EnvVar{Name:ANSIBLE_DEBUG_LOGS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CORE_SMARTGATEWAY_IMAGE,Value:quay.io/infrawatch/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BRIDGE_SMARTGATEWAY_IMAGE,Value:quay.io/infrawatch/sg-bridge:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OAUTH_PROXY_IMAGE,Value:quay.io/openshift/origin-oauth-proxy:latest,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:smart-gateway-operator.v5.0.1768085178,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:runner,ReadOnly:false,MountPath:/tmp/ansible-operator/runner,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nvdh8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod smart-gateway-operator-bbbc889bc-4bswt_service-telemetry(96b49c24-2f17-4321-a1ad-60db89870c43): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 26 00:24:31 crc kubenswrapper[4975]: E0126 00:24:31.132000 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/smart-gateway-operator-bbbc889bc-4bswt" podUID="96b49c24-2f17-4321-a1ad-60db89870c43" Jan 26 00:24:31 crc kubenswrapper[4975]: I0126 00:24:31.806491 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/interconnect-operator-5bb49f789d-4h6qb" event={"ID":"1b40081a-fe71-4f49-9b40-15b0ac5c9a27","Type":"ContainerStarted","Data":"8dbc8297400235f50ca79e21f5b657bd6fa49d87d415ae5c05b22db8aab0191c"} Jan 26 00:24:31 crc kubenswrapper[4975]: E0126 00:24:31.809201 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/infrawatch/smart-gateway-operator:latest\\\"\"" 
pod="service-telemetry/smart-gateway-operator-bbbc889bc-4bswt" podUID="96b49c24-2f17-4321-a1ad-60db89870c43" Jan 26 00:24:31 crc kubenswrapper[4975]: I0126 00:24:31.847666 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/interconnect-operator-5bb49f789d-4h6qb" podStartSLOduration=14.444573953 podStartE2EDuration="24.847646838s" podCreationTimestamp="2026-01-26 00:24:07 +0000 UTC" firstStartedPulling="2026-01-26 00:24:07.870101979 +0000 UTC m=+1031.991307483" lastFinishedPulling="2026-01-26 00:24:18.273174874 +0000 UTC m=+1042.394380368" observedRunningTime="2026-01-26 00:24:31.824074372 +0000 UTC m=+1055.945279866" watchObservedRunningTime="2026-01-26 00:24:31.847646838 +0000 UTC m=+1055.968852322" Jan 26 00:24:35 crc kubenswrapper[4975]: I0126 00:24:35.836244 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-55b89ddfb9-zxp9z" event={"ID":"9625ef50-fe72-40a4-bd92-c3c714a02ac7","Type":"ContainerStarted","Data":"e86b5d0499040e9c05189e8068d889b40b54302d80d6676bbd85d09c849e9b31"} Jan 26 00:24:35 crc kubenswrapper[4975]: I0126 00:24:35.860465 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-operator-55b89ddfb9-zxp9z" podStartSLOduration=1.986770801 podStartE2EDuration="26.860443081s" podCreationTimestamp="2026-01-26 00:24:09 +0000 UTC" firstStartedPulling="2026-01-26 00:24:10.722857039 +0000 UTC m=+1034.844062533" lastFinishedPulling="2026-01-26 00:24:35.596529319 +0000 UTC m=+1059.717734813" observedRunningTime="2026-01-26 00:24:35.852796146 +0000 UTC m=+1059.974001640" watchObservedRunningTime="2026-01-26 00:24:35.860443081 +0000 UTC m=+1059.981648575" Jan 26 00:24:46 crc kubenswrapper[4975]: I0126 00:24:46.915026 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bbbc889bc-4bswt" event={"ID":"96b49c24-2f17-4321-a1ad-60db89870c43","Type":"ContainerStarted","Data":"43b11ab40b26adb7273bc89d2bc57e2bd4e00b82874e62d61376bb96dd136f09"} Jan 26 00:25:04 crc kubenswrapper[4975]: I0126 00:25:04.803629 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/smart-gateway-operator-bbbc889bc-4bswt" podStartSLOduration=20.332882807 podStartE2EDuration="56.803599025s" podCreationTimestamp="2026-01-26 00:24:08 +0000 UTC" firstStartedPulling="2026-01-26 00:24:09.54866896 +0000 UTC m=+1033.669874454" lastFinishedPulling="2026-01-26 00:24:46.019385178 +0000 UTC m=+1070.140590672" observedRunningTime="2026-01-26 00:24:46.931517615 +0000 UTC m=+1071.052723109" watchObservedRunningTime="2026-01-26 00:25:04.803599025 +0000 UTC m=+1088.924804539" Jan 26 00:25:04 crc kubenswrapper[4975]: I0126 00:25:04.806916 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-xmfhs"] Jan 26 00:25:04 crc kubenswrapper[4975]: I0126 00:25:04.807891 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" Jan 26 00:25:04 crc kubenswrapper[4975]: I0126 00:25:04.810746 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-interconnect-sasl-config" Jan 26 00:25:04 crc kubenswrapper[4975]: I0126 00:25:04.811205 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-openstack-ca" Jan 26 00:25:04 crc kubenswrapper[4975]: I0126 00:25:04.811480 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-inter-router-ca" Jan 26 00:25:04 crc kubenswrapper[4975]: I0126 00:25:04.811890 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-dockercfg-wph5k" Jan 26 00:25:04 crc kubenswrapper[4975]: I0126 00:25:04.812171 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-inter-router-credentials" Jan 26 00:25:04 crc kubenswrapper[4975]: I0126 00:25:04.812411 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-openstack-credentials" Jan 26 00:25:04 crc kubenswrapper[4975]: I0126 00:25:04.812595 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-users" Jan 26 00:25:04 crc kubenswrapper[4975]: I0126 00:25:04.821467 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-xmfhs"] Jan 26 00:25:04 crc kubenswrapper[4975]: I0126 00:25:04.967696 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w6tdn\" (UniqueName: \"kubernetes.io/projected/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-kube-api-access-w6tdn\") pod \"default-interconnect-68864d46cb-xmfhs\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" Jan 26 00:25:04 crc kubenswrapper[4975]: I0126 00:25:04.967782 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-xmfhs\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" Jan 26 00:25:04 crc kubenswrapper[4975]: I0126 00:25:04.967822 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-xmfhs\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" Jan 26 00:25:04 crc kubenswrapper[4975]: I0126 00:25:04.967868 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-sasl-config\") pod \"default-interconnect-68864d46cb-xmfhs\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" Jan 26 00:25:04 crc kubenswrapper[4975]: I0126 00:25:04.967910 4975 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-sasl-users\") pod \"default-interconnect-68864d46cb-xmfhs\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" Jan 26 00:25:04 crc kubenswrapper[4975]: I0126 00:25:04.967934 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-xmfhs\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" Jan 26 00:25:04 crc kubenswrapper[4975]: I0126 00:25:04.967957 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-xmfhs\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" Jan 26 00:25:05 crc kubenswrapper[4975]: I0126 00:25:05.069241 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w6tdn\" (UniqueName: \"kubernetes.io/projected/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-kube-api-access-w6tdn\") pod \"default-interconnect-68864d46cb-xmfhs\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" Jan 26 00:25:05 crc kubenswrapper[4975]: I0126 00:25:05.069296 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-xmfhs\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" Jan 26 00:25:05 crc kubenswrapper[4975]: I0126 00:25:05.069345 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-xmfhs\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" Jan 26 00:25:05 crc kubenswrapper[4975]: I0126 00:25:05.069390 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-sasl-config\") pod \"default-interconnect-68864d46cb-xmfhs\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" Jan 26 00:25:05 crc kubenswrapper[4975]: I0126 00:25:05.069408 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-sasl-users\") pod \"default-interconnect-68864d46cb-xmfhs\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" Jan 26 00:25:05 crc kubenswrapper[4975]: I0126 00:25:05.069428 4975 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-xmfhs\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" Jan 26 00:25:05 crc kubenswrapper[4975]: I0126 00:25:05.069448 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-xmfhs\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" Jan 26 00:25:05 crc kubenswrapper[4975]: I0126 00:25:05.070876 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-sasl-config\") pod \"default-interconnect-68864d46cb-xmfhs\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" Jan 26 00:25:05 crc kubenswrapper[4975]: I0126 00:25:05.076268 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-sasl-users\") pod \"default-interconnect-68864d46cb-xmfhs\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" Jan 26 00:25:05 crc kubenswrapper[4975]: I0126 00:25:05.076306 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-xmfhs\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" Jan 26 00:25:05 crc kubenswrapper[4975]: I0126 00:25:05.077485 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-xmfhs\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" Jan 26 00:25:05 crc kubenswrapper[4975]: I0126 00:25:05.089410 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-xmfhs\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" Jan 26 00:25:05 crc kubenswrapper[4975]: I0126 00:25:05.090115 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-xmfhs\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" Jan 26 00:25:05 crc kubenswrapper[4975]: I0126 00:25:05.091311 4975 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-w6tdn\" (UniqueName: \"kubernetes.io/projected/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-kube-api-access-w6tdn\") pod \"default-interconnect-68864d46cb-xmfhs\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" Jan 26 00:25:05 crc kubenswrapper[4975]: I0126 00:25:05.135324 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" Jan 26 00:25:05 crc kubenswrapper[4975]: I0126 00:25:05.597986 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-xmfhs"] Jan 26 00:25:06 crc kubenswrapper[4975]: I0126 00:25:06.043376 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" event={"ID":"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2","Type":"ContainerStarted","Data":"ec75653d28dc0033cd1f184fd87e119088e1cb030cf7741f456a4d7ca7b8e637"} Jan 26 00:25:10 crc kubenswrapper[4975]: I0126 00:25:10.481243 4975 patch_prober.go:28] interesting pod/machine-config-daemon-f42fk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 00:25:10 crc kubenswrapper[4975]: I0126 00:25:10.481542 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 00:25:17 crc kubenswrapper[4975]: I0126 00:25:17.164572 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" event={"ID":"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2","Type":"ContainerStarted","Data":"6aa373af24837e726132b1429f88162c40c4d5ba919c5862da2afa8c86d6e2a8"} Jan 26 00:25:17 crc kubenswrapper[4975]: I0126 00:25:17.183462 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" podStartSLOduration=2.613473912 podStartE2EDuration="13.183443685s" podCreationTimestamp="2026-01-26 00:25:04 +0000 UTC" firstStartedPulling="2026-01-26 00:25:05.602626858 +0000 UTC m=+1089.723832362" lastFinishedPulling="2026-01-26 00:25:16.172596641 +0000 UTC m=+1100.293802135" observedRunningTime="2026-01-26 00:25:17.180895474 +0000 UTC m=+1101.302100978" watchObservedRunningTime="2026-01-26 00:25:17.183443685 +0000 UTC m=+1101.304649179" Jan 26 00:25:18 crc kubenswrapper[4975]: I0126 00:25:18.810376 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/prometheus-default-0"] Jan 26 00:25:18 crc kubenswrapper[4975]: I0126 00:25:18.811903 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-default-0" Jan 26 00:25:18 crc kubenswrapper[4975]: I0126 00:25:18.817048 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"serving-certs-ca-bundle" Jan 26 00:25:18 crc kubenswrapper[4975]: I0126 00:25:18.819228 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-prometheus-proxy-tls" Jan 26 00:25:18 crc kubenswrapper[4975]: I0126 00:25:18.819269 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-default-rulefiles-1" Jan 26 00:25:18 crc kubenswrapper[4975]: I0126 00:25:18.819288 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default-web-config" Jan 26 00:25:18 crc kubenswrapper[4975]: I0126 00:25:18.819310 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-default-rulefiles-0" Jan 26 00:25:18 crc kubenswrapper[4975]: I0126 00:25:18.819293 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-session-secret" Jan 26 00:25:18 crc kubenswrapper[4975]: I0126 00:25:18.820571 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default" Jan 26 00:25:18 crc kubenswrapper[4975]: I0126 00:25:18.820808 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-default-rulefiles-2" Jan 26 00:25:18 crc kubenswrapper[4975]: I0126 00:25:18.820887 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default-tls-assets-0" Jan 26 00:25:18 crc kubenswrapper[4975]: I0126 00:25:18.825020 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-stf-dockercfg-6rdfx" Jan 26 00:25:18 crc kubenswrapper[4975]: I0126 00:25:18.836506 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-default-0"] Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.006946 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-config\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.007000 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.007080 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-498a1683-8730-48a5-a574-86461fc06815\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-498a1683-8730-48a5-a574-86461fc06815\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.007116 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-default-rulefiles-0\" (UniqueName: 
\"kubernetes.io/configmap/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.007168 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-default-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-prometheus-default-rulefiles-1\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.007199 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.007320 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-tls-assets\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.007402 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-web-config\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.007553 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-config-out\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.007577 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-default-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-prometheus-default-rulefiles-2\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.007620 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wkkx\" (UniqueName: \"kubernetes.io/projected/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-kube-api-access-2wkkx\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.007676 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 
crc kubenswrapper[4975]: I0126 00:25:19.109503 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.109914 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-config\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.109959 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.110013 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-498a1683-8730-48a5-a574-86461fc06815\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-498a1683-8730-48a5-a574-86461fc06815\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.110051 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-default-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.110096 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-default-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-prometheus-default-rulefiles-1\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.110131 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: E0126 00:25:19.110141 4975 secret.go:188] Couldn't get secret service-telemetry/default-prometheus-proxy-tls: secret "default-prometheus-proxy-tls" not found Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.110164 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-tls-assets\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.110195 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"web-config\" (UniqueName: \"kubernetes.io/secret/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-web-config\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: E0126 00:25:19.110223 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-secret-default-prometheus-proxy-tls podName:a705e25a-ff6d-4954-8c70-ae6d973c3d0e nodeName:}" failed. No retries permitted until 2026-01-26 00:25:19.610204443 +0000 UTC m=+1103.731409927 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "secret-default-prometheus-proxy-tls" (UniqueName: "kubernetes.io/secret/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-secret-default-prometheus-proxy-tls") pod "prometheus-default-0" (UID: "a705e25a-ff6d-4954-8c70-ae6d973c3d0e") : secret "default-prometheus-proxy-tls" not found Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.110263 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-default-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-prometheus-default-rulefiles-2\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.110295 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-config-out\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.110335 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wkkx\" (UniqueName: \"kubernetes.io/projected/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-kube-api-access-2wkkx\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.111175 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-default-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-prometheus-default-rulefiles-2\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.111582 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-default-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-prometheus-default-rulefiles-1\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.111891 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-default-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.112165 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.113223 4975 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.113293 4975 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-498a1683-8730-48a5-a574-86461fc06815\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-498a1683-8730-48a5-a574-86461fc06815\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/19ed69f028a924b1254c8b213cc84d76de25d01f27015237668c08c3e5f526e0/globalmount\"" pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.115840 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-web-config\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.115946 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-config\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.116215 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-tls-assets\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.122213 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.125801 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-config-out\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.129588 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wkkx\" (UniqueName: \"kubernetes.io/projected/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-kube-api-access-2wkkx\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.149913 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-498a1683-8730-48a5-a574-86461fc06815\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-498a1683-8730-48a5-a574-86461fc06815\") pod 
\"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: I0126 00:25:19.617977 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:19 crc kubenswrapper[4975]: E0126 00:25:19.618505 4975 secret.go:188] Couldn't get secret service-telemetry/default-prometheus-proxy-tls: secret "default-prometheus-proxy-tls" not found Jan 26 00:25:19 crc kubenswrapper[4975]: E0126 00:25:19.618654 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-secret-default-prometheus-proxy-tls podName:a705e25a-ff6d-4954-8c70-ae6d973c3d0e nodeName:}" failed. No retries permitted until 2026-01-26 00:25:20.61863263 +0000 UTC m=+1104.739838124 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "secret-default-prometheus-proxy-tls" (UniqueName: "kubernetes.io/secret/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-secret-default-prometheus-proxy-tls") pod "prometheus-default-0" (UID: "a705e25a-ff6d-4954-8c70-ae6d973c3d0e") : secret "default-prometheus-proxy-tls" not found Jan 26 00:25:20 crc kubenswrapper[4975]: I0126 00:25:20.634180 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:20 crc kubenswrapper[4975]: I0126 00:25:20.639495 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/a705e25a-ff6d-4954-8c70-ae6d973c3d0e-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"a705e25a-ff6d-4954-8c70-ae6d973c3d0e\") " pod="service-telemetry/prometheus-default-0" Jan 26 00:25:20 crc kubenswrapper[4975]: I0126 00:25:20.926761 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/prometheus-default-0" Jan 26 00:25:21 crc kubenswrapper[4975]: I0126 00:25:21.199050 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-default-0"] Jan 26 00:25:22 crc kubenswrapper[4975]: I0126 00:25:22.196396 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"a705e25a-ff6d-4954-8c70-ae6d973c3d0e","Type":"ContainerStarted","Data":"1a16bc8c9db4f183880aefdcda0588d47751fa5f17e52da7f9651fe13cebcabd"} Jan 26 00:25:25 crc kubenswrapper[4975]: I0126 00:25:25.217984 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"a705e25a-ff6d-4954-8c70-ae6d973c3d0e","Type":"ContainerStarted","Data":"de2f78ffbc748cb29ed84f44976e29455ff65021f2288fa209982d11b8abbeac"} Jan 26 00:25:29 crc kubenswrapper[4975]: I0126 00:25:29.298835 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-snmp-webhook-78bcbbdcff-nt9q7"] Jan 26 00:25:29 crc kubenswrapper[4975]: I0126 00:25:29.300206 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-nt9q7" Jan 26 00:25:29 crc kubenswrapper[4975]: I0126 00:25:29.307057 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-snmp-webhook-78bcbbdcff-nt9q7"] Jan 26 00:25:29 crc kubenswrapper[4975]: I0126 00:25:29.486829 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dg48q\" (UniqueName: \"kubernetes.io/projected/cc95813a-9cba-437a-bce6-98252790efed-kube-api-access-dg48q\") pod \"default-snmp-webhook-78bcbbdcff-nt9q7\" (UID: \"cc95813a-9cba-437a-bce6-98252790efed\") " pod="service-telemetry/default-snmp-webhook-78bcbbdcff-nt9q7" Jan 26 00:25:29 crc kubenswrapper[4975]: I0126 00:25:29.588790 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dg48q\" (UniqueName: \"kubernetes.io/projected/cc95813a-9cba-437a-bce6-98252790efed-kube-api-access-dg48q\") pod \"default-snmp-webhook-78bcbbdcff-nt9q7\" (UID: \"cc95813a-9cba-437a-bce6-98252790efed\") " pod="service-telemetry/default-snmp-webhook-78bcbbdcff-nt9q7" Jan 26 00:25:29 crc kubenswrapper[4975]: I0126 00:25:29.617465 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dg48q\" (UniqueName: \"kubernetes.io/projected/cc95813a-9cba-437a-bce6-98252790efed-kube-api-access-dg48q\") pod \"default-snmp-webhook-78bcbbdcff-nt9q7\" (UID: \"cc95813a-9cba-437a-bce6-98252790efed\") " pod="service-telemetry/default-snmp-webhook-78bcbbdcff-nt9q7" Jan 26 00:25:29 crc kubenswrapper[4975]: I0126 00:25:29.916660 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-nt9q7" Jan 26 00:25:30 crc kubenswrapper[4975]: I0126 00:25:30.327960 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-snmp-webhook-78bcbbdcff-nt9q7"] Jan 26 00:25:31 crc kubenswrapper[4975]: I0126 00:25:31.258168 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-nt9q7" event={"ID":"cc95813a-9cba-437a-bce6-98252790efed","Type":"ContainerStarted","Data":"59e339daa47ef24ad0eb597c751da3609d0a70b37deb00dae1514926ceee9f48"} Jan 26 00:25:32 crc kubenswrapper[4975]: I0126 00:25:32.266556 4975 generic.go:334] "Generic (PLEG): container finished" podID="a705e25a-ff6d-4954-8c70-ae6d973c3d0e" containerID="de2f78ffbc748cb29ed84f44976e29455ff65021f2288fa209982d11b8abbeac" exitCode=0 Jan 26 00:25:32 crc kubenswrapper[4975]: I0126 00:25:32.266644 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"a705e25a-ff6d-4954-8c70-ae6d973c3d0e","Type":"ContainerDied","Data":"de2f78ffbc748cb29ed84f44976e29455ff65021f2288fa209982d11b8abbeac"} Jan 26 00:25:32 crc kubenswrapper[4975]: I0126 00:25:32.869168 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/alertmanager-default-0"] Jan 26 00:25:32 crc kubenswrapper[4975]: I0126 00:25:32.871220 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:32 crc kubenswrapper[4975]: I0126 00:25:32.875658 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-tls-assets-0" Jan 26 00:25:32 crc kubenswrapper[4975]: I0126 00:25:32.876143 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-stf-dockercfg-q6xth" Jan 26 00:25:32 crc kubenswrapper[4975]: I0126 00:25:32.876422 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-alertmanager-proxy-tls" Jan 26 00:25:32 crc kubenswrapper[4975]: I0126 00:25:32.876701 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-web-config" Jan 26 00:25:32 crc kubenswrapper[4975]: I0126 00:25:32.876898 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-cluster-tls-config" Jan 26 00:25:32 crc kubenswrapper[4975]: I0126 00:25:32.899078 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-generated" Jan 26 00:25:32 crc kubenswrapper[4975]: I0126 00:25:32.908720 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/alertmanager-default-0"] Jan 26 00:25:32 crc kubenswrapper[4975]: I0126 00:25:32.938859 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-web-config\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:32 crc kubenswrapper[4975]: I0126 00:25:32.938924 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:32 crc kubenswrapper[4975]: I0126 00:25:32.938966 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/2ca91482-436e-48bf-8d3b-256ab164a837-tls-assets\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:32 crc kubenswrapper[4975]: I0126 00:25:32.939006 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-cluster-tls-config\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:32 crc kubenswrapper[4975]: I0126 00:25:32.939059 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jrhbf\" (UniqueName: \"kubernetes.io/projected/2ca91482-436e-48bf-8d3b-256ab164a837-kube-api-access-jrhbf\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:32 crc kubenswrapper[4975]: I0126 00:25:32.939085 4975 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:32 crc kubenswrapper[4975]: I0126 00:25:32.939152 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/2ca91482-436e-48bf-8d3b-256ab164a837-config-out\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:32 crc kubenswrapper[4975]: I0126 00:25:32.939500 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-config-volume\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:32 crc kubenswrapper[4975]: I0126 00:25:32.939583 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-452d88e9-7b35-4976-8439-657f01ebf12e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-452d88e9-7b35-4976-8439-657f01ebf12e\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:33 crc kubenswrapper[4975]: I0126 00:25:33.050862 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/2ca91482-436e-48bf-8d3b-256ab164a837-tls-assets\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:33 crc kubenswrapper[4975]: I0126 00:25:33.051007 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-cluster-tls-config\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:33 crc kubenswrapper[4975]: I0126 00:25:33.051063 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jrhbf\" (UniqueName: \"kubernetes.io/projected/2ca91482-436e-48bf-8d3b-256ab164a837-kube-api-access-jrhbf\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:33 crc kubenswrapper[4975]: I0126 00:25:33.051094 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:33 crc kubenswrapper[4975]: I0126 00:25:33.051164 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/2ca91482-436e-48bf-8d3b-256ab164a837-config-out\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:33 crc 
kubenswrapper[4975]: I0126 00:25:33.051199 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-config-volume\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:33 crc kubenswrapper[4975]: I0126 00:25:33.051231 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-452d88e9-7b35-4976-8439-657f01ebf12e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-452d88e9-7b35-4976-8439-657f01ebf12e\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:33 crc kubenswrapper[4975]: I0126 00:25:33.051270 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-web-config\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:33 crc kubenswrapper[4975]: I0126 00:25:33.051297 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:33 crc kubenswrapper[4975]: E0126 00:25:33.051515 4975 secret.go:188] Couldn't get secret service-telemetry/default-alertmanager-proxy-tls: secret "default-alertmanager-proxy-tls" not found Jan 26 00:25:33 crc kubenswrapper[4975]: E0126 00:25:33.051613 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-secret-default-alertmanager-proxy-tls podName:2ca91482-436e-48bf-8d3b-256ab164a837 nodeName:}" failed. No retries permitted until 2026-01-26 00:25:33.551578547 +0000 UTC m=+1117.672784041 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "secret-default-alertmanager-proxy-tls" (UniqueName: "kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-secret-default-alertmanager-proxy-tls") pod "alertmanager-default-0" (UID: "2ca91482-436e-48bf-8d3b-256ab164a837") : secret "default-alertmanager-proxy-tls" not found Jan 26 00:25:33 crc kubenswrapper[4975]: I0126 00:25:33.421770 4975 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 26 00:25:33 crc kubenswrapper[4975]: I0126 00:25:33.421824 4975 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-452d88e9-7b35-4976-8439-657f01ebf12e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-452d88e9-7b35-4976-8439-657f01ebf12e\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/3955b105a927967138ec564a9f2ef22eebd98c58598b38390125e167551d8532/globalmount\"" pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:33 crc kubenswrapper[4975]: I0126 00:25:33.430135 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/2ca91482-436e-48bf-8d3b-256ab164a837-tls-assets\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:33 crc kubenswrapper[4975]: I0126 00:25:33.454863 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:33 crc kubenswrapper[4975]: I0126 00:25:33.480586 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/2ca91482-436e-48bf-8d3b-256ab164a837-config-out\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:33 crc kubenswrapper[4975]: I0126 00:25:33.482064 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-cluster-tls-config\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:33 crc kubenswrapper[4975]: I0126 00:25:33.486440 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-web-config\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:33 crc kubenswrapper[4975]: I0126 00:25:33.487902 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-config-volume\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:33 crc kubenswrapper[4975]: I0126 00:25:33.490256 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jrhbf\" (UniqueName: \"kubernetes.io/projected/2ca91482-436e-48bf-8d3b-256ab164a837-kube-api-access-jrhbf\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:33 crc kubenswrapper[4975]: I0126 00:25:33.523262 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-452d88e9-7b35-4976-8439-657f01ebf12e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-452d88e9-7b35-4976-8439-657f01ebf12e\") pod 
\"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:33 crc kubenswrapper[4975]: I0126 00:25:33.615384 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:33 crc kubenswrapper[4975]: E0126 00:25:33.615716 4975 secret.go:188] Couldn't get secret service-telemetry/default-alertmanager-proxy-tls: secret "default-alertmanager-proxy-tls" not found Jan 26 00:25:33 crc kubenswrapper[4975]: E0126 00:25:33.615808 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-secret-default-alertmanager-proxy-tls podName:2ca91482-436e-48bf-8d3b-256ab164a837 nodeName:}" failed. No retries permitted until 2026-01-26 00:25:34.615785969 +0000 UTC m=+1118.736991463 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "secret-default-alertmanager-proxy-tls" (UniqueName: "kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-secret-default-alertmanager-proxy-tls") pod "alertmanager-default-0" (UID: "2ca91482-436e-48bf-8d3b-256ab164a837") : secret "default-alertmanager-proxy-tls" not found Jan 26 00:25:34 crc kubenswrapper[4975]: I0126 00:25:34.703590 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:34 crc kubenswrapper[4975]: E0126 00:25:34.703841 4975 secret.go:188] Couldn't get secret service-telemetry/default-alertmanager-proxy-tls: secret "default-alertmanager-proxy-tls" not found Jan 26 00:25:34 crc kubenswrapper[4975]: E0126 00:25:34.704269 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-secret-default-alertmanager-proxy-tls podName:2ca91482-436e-48bf-8d3b-256ab164a837 nodeName:}" failed. No retries permitted until 2026-01-26 00:25:36.704243115 +0000 UTC m=+1120.825448609 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "secret-default-alertmanager-proxy-tls" (UniqueName: "kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-secret-default-alertmanager-proxy-tls") pod "alertmanager-default-0" (UID: "2ca91482-436e-48bf-8d3b-256ab164a837") : secret "default-alertmanager-proxy-tls" not found Jan 26 00:25:36 crc kubenswrapper[4975]: I0126 00:25:36.747637 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:36 crc kubenswrapper[4975]: E0126 00:25:36.747880 4975 secret.go:188] Couldn't get secret service-telemetry/default-alertmanager-proxy-tls: secret "default-alertmanager-proxy-tls" not found Jan 26 00:25:36 crc kubenswrapper[4975]: E0126 00:25:36.748329 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-secret-default-alertmanager-proxy-tls podName:2ca91482-436e-48bf-8d3b-256ab164a837 nodeName:}" failed. No retries permitted until 2026-01-26 00:25:40.748300205 +0000 UTC m=+1124.869505699 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "secret-default-alertmanager-proxy-tls" (UniqueName: "kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-secret-default-alertmanager-proxy-tls") pod "alertmanager-default-0" (UID: "2ca91482-436e-48bf-8d3b-256ab164a837") : secret "default-alertmanager-proxy-tls" not found Jan 26 00:25:40 crc kubenswrapper[4975]: I0126 00:25:40.481936 4975 patch_prober.go:28] interesting pod/machine-config-daemon-f42fk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 00:25:40 crc kubenswrapper[4975]: I0126 00:25:40.482530 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 00:25:40 crc kubenswrapper[4975]: I0126 00:25:40.791014 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:40 crc kubenswrapper[4975]: I0126 00:25:40.797958 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/2ca91482-436e-48bf-8d3b-256ab164a837-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"2ca91482-436e-48bf-8d3b-256ab164a837\") " pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:41 crc kubenswrapper[4975]: I0126 00:25:41.004117 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/alertmanager-default-0" Jan 26 00:25:59 crc kubenswrapper[4975]: I0126 00:25:59.212356 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j"] Jan 26 00:25:59 crc kubenswrapper[4975]: I0126 00:25:59.214107 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" Jan 26 00:25:59 crc kubenswrapper[4975]: I0126 00:25:59.222410 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-coll-meter-proxy-tls" Jan 26 00:25:59 crc kubenswrapper[4975]: I0126 00:25:59.228682 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-session-secret" Jan 26 00:25:59 crc kubenswrapper[4975]: I0126 00:25:59.230690 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-dockercfg-vhxzc" Jan 26 00:25:59 crc kubenswrapper[4975]: I0126 00:25:59.231284 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-coll-meter-sg-core-configmap" Jan 26 00:25:59 crc kubenswrapper[4975]: I0126 00:25:59.236439 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j"] Jan 26 00:25:59 crc kubenswrapper[4975]: I0126 00:25:59.412639 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/0d7d25f6-4c23-4290-a03c-73691e6237a4-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j\" (UID: \"0d7d25f6-4c23-4290-a03c-73691e6237a4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" Jan 26 00:25:59 crc kubenswrapper[4975]: I0126 00:25:59.412718 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6l6mt\" (UniqueName: \"kubernetes.io/projected/0d7d25f6-4c23-4290-a03c-73691e6237a4-kube-api-access-6l6mt\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j\" (UID: \"0d7d25f6-4c23-4290-a03c-73691e6237a4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" Jan 26 00:25:59 crc kubenswrapper[4975]: I0126 00:25:59.412775 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/0d7d25f6-4c23-4290-a03c-73691e6237a4-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j\" (UID: \"0d7d25f6-4c23-4290-a03c-73691e6237a4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" Jan 26 00:25:59 crc kubenswrapper[4975]: I0126 00:25:59.412833 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/0d7d25f6-4c23-4290-a03c-73691e6237a4-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j\" (UID: \"0d7d25f6-4c23-4290-a03c-73691e6237a4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" Jan 26 00:25:59 crc kubenswrapper[4975]: I0126 00:25:59.412853 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: 
\"kubernetes.io/configmap/0d7d25f6-4c23-4290-a03c-73691e6237a4-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j\" (UID: \"0d7d25f6-4c23-4290-a03c-73691e6237a4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" Jan 26 00:25:59 crc kubenswrapper[4975]: I0126 00:25:59.514248 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/0d7d25f6-4c23-4290-a03c-73691e6237a4-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j\" (UID: \"0d7d25f6-4c23-4290-a03c-73691e6237a4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" Jan 26 00:25:59 crc kubenswrapper[4975]: I0126 00:25:59.514312 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/0d7d25f6-4c23-4290-a03c-73691e6237a4-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j\" (UID: \"0d7d25f6-4c23-4290-a03c-73691e6237a4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" Jan 26 00:25:59 crc kubenswrapper[4975]: I0126 00:25:59.514384 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/0d7d25f6-4c23-4290-a03c-73691e6237a4-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j\" (UID: \"0d7d25f6-4c23-4290-a03c-73691e6237a4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" Jan 26 00:25:59 crc kubenswrapper[4975]: I0126 00:25:59.514451 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6l6mt\" (UniqueName: \"kubernetes.io/projected/0d7d25f6-4c23-4290-a03c-73691e6237a4-kube-api-access-6l6mt\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j\" (UID: \"0d7d25f6-4c23-4290-a03c-73691e6237a4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" Jan 26 00:25:59 crc kubenswrapper[4975]: I0126 00:25:59.514518 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/0d7d25f6-4c23-4290-a03c-73691e6237a4-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j\" (UID: \"0d7d25f6-4c23-4290-a03c-73691e6237a4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" Jan 26 00:25:59 crc kubenswrapper[4975]: I0126 00:25:59.515271 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/0d7d25f6-4c23-4290-a03c-73691e6237a4-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j\" (UID: \"0d7d25f6-4c23-4290-a03c-73691e6237a4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" Jan 26 00:25:59 crc kubenswrapper[4975]: E0126 00:25:59.515356 4975 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-coll-meter-proxy-tls: secret "default-cloud1-coll-meter-proxy-tls" not found Jan 26 00:25:59 crc kubenswrapper[4975]: E0126 00:25:59.515412 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0d7d25f6-4c23-4290-a03c-73691e6237a4-default-cloud1-coll-meter-proxy-tls podName:0d7d25f6-4c23-4290-a03c-73691e6237a4 nodeName:}" failed. 
No retries permitted until 2026-01-26 00:26:00.015394309 +0000 UTC m=+1144.136599803 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "default-cloud1-coll-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/0d7d25f6-4c23-4290-a03c-73691e6237a4-default-cloud1-coll-meter-proxy-tls") pod "default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" (UID: "0d7d25f6-4c23-4290-a03c-73691e6237a4") : secret "default-cloud1-coll-meter-proxy-tls" not found Jan 26 00:25:59 crc kubenswrapper[4975]: I0126 00:25:59.516172 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/0d7d25f6-4c23-4290-a03c-73691e6237a4-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j\" (UID: \"0d7d25f6-4c23-4290-a03c-73691e6237a4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" Jan 26 00:25:59 crc kubenswrapper[4975]: I0126 00:25:59.521157 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/0d7d25f6-4c23-4290-a03c-73691e6237a4-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j\" (UID: \"0d7d25f6-4c23-4290-a03c-73691e6237a4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" Jan 26 00:25:59 crc kubenswrapper[4975]: I0126 00:25:59.540697 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6l6mt\" (UniqueName: \"kubernetes.io/projected/0d7d25f6-4c23-4290-a03c-73691e6237a4-kube-api-access-6l6mt\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j\" (UID: \"0d7d25f6-4c23-4290-a03c-73691e6237a4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" Jan 26 00:25:59 crc kubenswrapper[4975]: E0126 00:25:59.761248 4975 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/infrawatch/prometheus-webhook-snmp:latest" Jan 26 00:25:59 crc kubenswrapper[4975]: E0126 00:25:59.761415 4975 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:prometheus-webhook-snmp,Image:quay.io/infrawatch/prometheus-webhook-snmp:latest,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:,HostPort:0,ContainerPort:9099,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:SNMP_COMMUNITY,Value:public,ValueFrom:nil,},EnvVar{Name:SNMP_RETRIES,Value:5,ValueFrom:nil,},EnvVar{Name:SNMP_HOST,Value:192.168.24.254,ValueFrom:nil,},EnvVar{Name:SNMP_PORT,Value:162,ValueFrom:nil,},EnvVar{Name:SNMP_TIMEOUT,Value:1,ValueFrom:nil,},EnvVar{Name:ALERT_OID_LABEL,Value:oid,ValueFrom:nil,},EnvVar{Name:TRAP_OID_PREFIX,Value:1.3.6.1.4.1.50495.15,ValueFrom:nil,},EnvVar{Name:TRAP_DEFAULT_OID,Value:1.3.6.1.4.1.50495.15.1.2.1,ValueFrom:nil,},EnvVar{Name:TRAP_DEFAULT_SEVERITY,Value:,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dg48q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod default-snmp-webhook-78bcbbdcff-nt9q7_service-telemetry(cc95813a-9cba-437a-bce6-98252790efed): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 26 00:25:59 crc kubenswrapper[4975]: E0126 00:25:59.762632 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-webhook-snmp\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-nt9q7" podUID="cc95813a-9cba-437a-bce6-98252790efed" Jan 26 00:25:59 crc kubenswrapper[4975]: E0126 00:25:59.939989 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus-webhook-snmp\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/infrawatch/prometheus-webhook-snmp:latest\\\"\"" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-nt9q7" podUID="cc95813a-9cba-437a-bce6-98252790efed" Jan 26 00:26:00 crc kubenswrapper[4975]: E0126 00:26:00.023855 4975 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="quay.io/prometheus/prometheus:latest" Jan 26 00:26:00 crc kubenswrapper[4975]: E0126 00:26:00.024149 4975 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:prometheus,Image:quay.io/prometheus/prometheus:latest,Command:[],Args:[--config.file=/etc/prometheus/config_out/prometheus.env.yaml --web.enable-lifecycle --web.route-prefix=/ --web.listen-address=127.0.0.1:9090 --storage.tsdb.retention.time=24h --storage.tsdb.path=/prometheus 
--web.config.file=/etc/prometheus/web_config/web-config.yaml],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-out,ReadOnly:true,MountPath:/etc/prometheus/config_out,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:tls-assets,ReadOnly:true,MountPath:/etc/prometheus/certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:prometheus-default-db,ReadOnly:false,MountPath:/prometheus,SubPath:prometheus-db,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:secret-default-prometheus-proxy-tls,ReadOnly:true,MountPath:/etc/prometheus/secrets/default-prometheus-proxy-tls,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:secret-default-session-secret,ReadOnly:true,MountPath:/etc/prometheus/secrets/default-session-secret,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:configmap-serving-certs-ca-bundle,ReadOnly:true,MountPath:/etc/prometheus/configmaps/serving-certs-ca-bundle,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:prometheus-default-rulefiles-0,ReadOnly:true,MountPath:/etc/prometheus/rules/prometheus-default-rulefiles-0,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:prometheus-default-rulefiles-1,ReadOnly:true,MountPath:/etc/prometheus/rules/prometheus-default-rulefiles-1,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:prometheus-default-rulefiles-2,ReadOnly:true,MountPath:/etc/prometheus/rules/prometheus-default-rulefiles-2,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:web-config,ReadOnly:true,MountPath:/etc/prometheus/web_config/web-config.yaml,SubPath:web-config.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2wkkx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[sh -c if [ -x \"$(command -v curl)\" ]; then exec curl --fail http://localhost:9090/-/healthy; elif [ -x \"$(command -v wget)\" ]; then exec wget -q -O /dev/null http://localhost:9090/-/healthy; else exit 1; fi],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:3,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:6,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[sh -c if [ -x \"$(command -v curl)\" ]; then exec curl --fail http://localhost:9090/-/ready; elif [ -x \"$(command -v wget)\" ]; then exec wget -q -O /dev/null http://localhost:9090/-/ready; else exit 1; 
fi],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:3,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[sh -c if [ -x \"$(command -v curl)\" ]; then exec curl --fail http://localhost:9090/-/ready; elif [ -x \"$(command -v wget)\" ]; then exec wget -q -O /dev/null http://localhost:9090/-/ready; else exit 1; fi],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:3,PeriodSeconds:15,SuccessThreshold:1,FailureThreshold:60,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod prometheus-default-0_service-telemetry(a705e25a-ff6d-4954-8c70-ae6d973c3d0e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 26 00:26:00 crc kubenswrapper[4975]: I0126 00:26:00.032071 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/0d7d25f6-4c23-4290-a03c-73691e6237a4-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j\" (UID: \"0d7d25f6-4c23-4290-a03c-73691e6237a4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" Jan 26 00:26:00 crc kubenswrapper[4975]: E0126 00:26:00.032260 4975 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-coll-meter-proxy-tls: secret "default-cloud1-coll-meter-proxy-tls" not found Jan 26 00:26:00 crc kubenswrapper[4975]: E0126 00:26:00.032312 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0d7d25f6-4c23-4290-a03c-73691e6237a4-default-cloud1-coll-meter-proxy-tls podName:0d7d25f6-4c23-4290-a03c-73691e6237a4 nodeName:}" failed. No retries permitted until 2026-01-26 00:26:01.032294376 +0000 UTC m=+1145.153499870 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "default-cloud1-coll-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/0d7d25f6-4c23-4290-a03c-73691e6237a4-default-cloud1-coll-meter-proxy-tls") pod "default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" (UID: "0d7d25f6-4c23-4290-a03c-73691e6237a4") : secret "default-cloud1-coll-meter-proxy-tls" not found Jan 26 00:26:00 crc kubenswrapper[4975]: I0126 00:26:00.083796 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/alertmanager-default-0"] Jan 26 00:26:00 crc kubenswrapper[4975]: W0126 00:26:00.089889 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2ca91482_436e_48bf_8d3b_256ab164a837.slice/crio-a4d9e970fe0cc37facf95afafa23b2d76dc8fcc067594cb985813a3914553de2 WatchSource:0}: Error finding container a4d9e970fe0cc37facf95afafa23b2d76dc8fcc067594cb985813a3914553de2: Status 404 returned error can't find the container with id a4d9e970fe0cc37facf95afafa23b2d76dc8fcc067594cb985813a3914553de2 Jan 26 00:26:00 crc kubenswrapper[4975]: I0126 00:26:00.939461 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"2ca91482-436e-48bf-8d3b-256ab164a837","Type":"ContainerStarted","Data":"a4d9e970fe0cc37facf95afafa23b2d76dc8fcc067594cb985813a3914553de2"} Jan 26 00:26:01 crc kubenswrapper[4975]: I0126 00:26:01.060276 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/0d7d25f6-4c23-4290-a03c-73691e6237a4-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j\" (UID: \"0d7d25f6-4c23-4290-a03c-73691e6237a4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" Jan 26 00:26:01 crc kubenswrapper[4975]: I0126 00:26:01.072750 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/0d7d25f6-4c23-4290-a03c-73691e6237a4-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j\" (UID: \"0d7d25f6-4c23-4290-a03c-73691e6237a4\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" Jan 26 00:26:01 crc kubenswrapper[4975]: I0126 00:26:01.332158 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" Jan 26 00:26:01 crc kubenswrapper[4975]: I0126 00:26:01.655536 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j"] Jan 26 00:26:01 crc kubenswrapper[4975]: I0126 00:26:01.688257 4975 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 26 00:26:01 crc kubenswrapper[4975]: I0126 00:26:01.959378 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" event={"ID":"0d7d25f6-4c23-4290-a03c-73691e6237a4","Type":"ContainerStarted","Data":"07cf03c65ddfd68fd1e3a242c8d985bbafea47f78e543feccc3e59d1338a3616"} Jan 26 00:26:02 crc kubenswrapper[4975]: I0126 00:26:02.969780 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"2ca91482-436e-48bf-8d3b-256ab164a837","Type":"ContainerStarted","Data":"e25a412745e8f51935dfa1b76175c032b3a503a4cc1f967455113f1b8eb5bde9"} Jan 26 00:26:02 crc kubenswrapper[4975]: I0126 00:26:02.972212 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"a705e25a-ff6d-4954-8c70-ae6d973c3d0e","Type":"ContainerStarted","Data":"1a695fd7fd0cdad9d1bce5b3f10d28858001fb37be2609a0ea508bab1f862cd2"} Jan 26 00:26:03 crc kubenswrapper[4975]: I0126 00:26:03.320291 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg"] Jan 26 00:26:03 crc kubenswrapper[4975]: I0126 00:26:03.321765 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" Jan 26 00:26:03 crc kubenswrapper[4975]: I0126 00:26:03.324119 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-ceil-meter-sg-core-configmap" Jan 26 00:26:03 crc kubenswrapper[4975]: I0126 00:26:03.324337 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-ceil-meter-proxy-tls" Jan 26 00:26:03 crc kubenswrapper[4975]: I0126 00:26:03.340356 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg"] Jan 26 00:26:03 crc kubenswrapper[4975]: I0126 00:26:03.402780 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg\" (UID: \"26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" Jan 26 00:26:03 crc kubenswrapper[4975]: I0126 00:26:03.402870 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg\" (UID: \"26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" Jan 26 00:26:03 crc kubenswrapper[4975]: I0126 00:26:03.402905 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-vt6hv\" (UniqueName: \"kubernetes.io/projected/26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529-kube-api-access-vt6hv\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg\" (UID: \"26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" Jan 26 00:26:03 crc kubenswrapper[4975]: I0126 00:26:03.402980 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg\" (UID: \"26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" Jan 26 00:26:03 crc kubenswrapper[4975]: I0126 00:26:03.403170 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg\" (UID: \"26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" Jan 26 00:26:03 crc kubenswrapper[4975]: I0126 00:26:03.504296 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg\" (UID: \"26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" Jan 26 00:26:03 crc kubenswrapper[4975]: I0126 00:26:03.504405 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg\" (UID: \"26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" Jan 26 00:26:03 crc kubenswrapper[4975]: I0126 00:26:03.504465 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg\" (UID: \"26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" Jan 26 00:26:03 crc kubenswrapper[4975]: I0126 00:26:03.504502 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg\" (UID: \"26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" Jan 26 00:26:03 crc kubenswrapper[4975]: I0126 00:26:03.504553 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vt6hv\" (UniqueName: \"kubernetes.io/projected/26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529-kube-api-access-vt6hv\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg\" (UID: \"26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529\") " 
pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" Jan 26 00:26:03 crc kubenswrapper[4975]: E0126 00:26:03.505016 4975 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-ceil-meter-proxy-tls: secret "default-cloud1-ceil-meter-proxy-tls" not found Jan 26 00:26:03 crc kubenswrapper[4975]: E0126 00:26:03.505096 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529-default-cloud1-ceil-meter-proxy-tls podName:26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529 nodeName:}" failed. No retries permitted until 2026-01-26 00:26:04.00507564 +0000 UTC m=+1148.126281134 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "default-cloud1-ceil-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529-default-cloud1-ceil-meter-proxy-tls") pod "default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" (UID: "26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529") : secret "default-cloud1-ceil-meter-proxy-tls" not found Jan 26 00:26:03 crc kubenswrapper[4975]: I0126 00:26:03.505279 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg\" (UID: \"26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" Jan 26 00:26:03 crc kubenswrapper[4975]: I0126 00:26:03.505854 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg\" (UID: \"26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" Jan 26 00:26:03 crc kubenswrapper[4975]: I0126 00:26:03.528284 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg\" (UID: \"26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" Jan 26 00:26:03 crc kubenswrapper[4975]: I0126 00:26:03.540870 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vt6hv\" (UniqueName: \"kubernetes.io/projected/26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529-kube-api-access-vt6hv\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg\" (UID: \"26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" Jan 26 00:26:04 crc kubenswrapper[4975]: I0126 00:26:04.032716 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg\" (UID: \"26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" Jan 26 00:26:04 crc kubenswrapper[4975]: E0126 00:26:04.032922 4975 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-ceil-meter-proxy-tls: secret "default-cloud1-ceil-meter-proxy-tls" not found Jan 26 00:26:04 crc kubenswrapper[4975]: 
E0126 00:26:04.032973 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529-default-cloud1-ceil-meter-proxy-tls podName:26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529 nodeName:}" failed. No retries permitted until 2026-01-26 00:26:05.032959126 +0000 UTC m=+1149.154164620 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "default-cloud1-ceil-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529-default-cloud1-ceil-meter-proxy-tls") pod "default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" (UID: "26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529") : secret "default-cloud1-ceil-meter-proxy-tls" not found Jan 26 00:26:05 crc kubenswrapper[4975]: I0126 00:26:05.050121 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg\" (UID: \"26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" Jan 26 00:26:05 crc kubenswrapper[4975]: I0126 00:26:05.053440 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg\" (UID: \"26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" Jan 26 00:26:05 crc kubenswrapper[4975]: I0126 00:26:05.163370 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" Jan 26 00:26:08 crc kubenswrapper[4975]: I0126 00:26:08.317456 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s"] Jan 26 00:26:08 crc kubenswrapper[4975]: I0126 00:26:08.319173 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" Jan 26 00:26:08 crc kubenswrapper[4975]: I0126 00:26:08.373987 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-sens-meter-sg-core-configmap" Jan 26 00:26:08 crc kubenswrapper[4975]: I0126 00:26:08.374188 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-sens-meter-proxy-tls" Jan 26 00:26:08 crc kubenswrapper[4975]: I0126 00:26:08.429401 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/5189d2e4-ce9c-4cb8-955a-22b1edde1b70-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s\" (UID: \"5189d2e4-ce9c-4cb8-955a-22b1edde1b70\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" Jan 26 00:26:08 crc kubenswrapper[4975]: I0126 00:26:08.429489 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/5189d2e4-ce9c-4cb8-955a-22b1edde1b70-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s\" (UID: \"5189d2e4-ce9c-4cb8-955a-22b1edde1b70\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" Jan 26 00:26:08 crc kubenswrapper[4975]: I0126 00:26:08.429533 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/5189d2e4-ce9c-4cb8-955a-22b1edde1b70-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s\" (UID: \"5189d2e4-ce9c-4cb8-955a-22b1edde1b70\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" Jan 26 00:26:08 crc kubenswrapper[4975]: I0126 00:26:08.429572 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/5189d2e4-ce9c-4cb8-955a-22b1edde1b70-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s\" (UID: \"5189d2e4-ce9c-4cb8-955a-22b1edde1b70\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" Jan 26 00:26:08 crc kubenswrapper[4975]: I0126 00:26:08.429688 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hw4lv\" (UniqueName: \"kubernetes.io/projected/5189d2e4-ce9c-4cb8-955a-22b1edde1b70-kube-api-access-hw4lv\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s\" (UID: \"5189d2e4-ce9c-4cb8-955a-22b1edde1b70\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" Jan 26 00:26:08 crc kubenswrapper[4975]: I0126 00:26:08.485853 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s"] Jan 26 00:26:08 crc kubenswrapper[4975]: I0126 00:26:08.534830 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/5189d2e4-ce9c-4cb8-955a-22b1edde1b70-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s\" (UID: \"5189d2e4-ce9c-4cb8-955a-22b1edde1b70\") " 
pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" Jan 26 00:26:08 crc kubenswrapper[4975]: I0126 00:26:08.534912 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/5189d2e4-ce9c-4cb8-955a-22b1edde1b70-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s\" (UID: \"5189d2e4-ce9c-4cb8-955a-22b1edde1b70\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" Jan 26 00:26:08 crc kubenswrapper[4975]: I0126 00:26:08.534998 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hw4lv\" (UniqueName: \"kubernetes.io/projected/5189d2e4-ce9c-4cb8-955a-22b1edde1b70-kube-api-access-hw4lv\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s\" (UID: \"5189d2e4-ce9c-4cb8-955a-22b1edde1b70\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" Jan 26 00:26:08 crc kubenswrapper[4975]: I0126 00:26:08.535068 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/5189d2e4-ce9c-4cb8-955a-22b1edde1b70-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s\" (UID: \"5189d2e4-ce9c-4cb8-955a-22b1edde1b70\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" Jan 26 00:26:08 crc kubenswrapper[4975]: I0126 00:26:08.535121 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/5189d2e4-ce9c-4cb8-955a-22b1edde1b70-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s\" (UID: \"5189d2e4-ce9c-4cb8-955a-22b1edde1b70\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" Jan 26 00:26:08 crc kubenswrapper[4975]: I0126 00:26:08.537934 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/5189d2e4-ce9c-4cb8-955a-22b1edde1b70-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s\" (UID: \"5189d2e4-ce9c-4cb8-955a-22b1edde1b70\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" Jan 26 00:26:08 crc kubenswrapper[4975]: I0126 00:26:08.538601 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/5189d2e4-ce9c-4cb8-955a-22b1edde1b70-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s\" (UID: \"5189d2e4-ce9c-4cb8-955a-22b1edde1b70\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" Jan 26 00:26:08 crc kubenswrapper[4975]: E0126 00:26:08.538694 4975 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-sens-meter-proxy-tls: secret "default-cloud1-sens-meter-proxy-tls" not found Jan 26 00:26:08 crc kubenswrapper[4975]: E0126 00:26:08.538766 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5189d2e4-ce9c-4cb8-955a-22b1edde1b70-default-cloud1-sens-meter-proxy-tls podName:5189d2e4-ce9c-4cb8-955a-22b1edde1b70 nodeName:}" failed. No retries permitted until 2026-01-26 00:26:09.03875098 +0000 UTC m=+1153.159956474 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "default-cloud1-sens-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/5189d2e4-ce9c-4cb8-955a-22b1edde1b70-default-cloud1-sens-meter-proxy-tls") pod "default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" (UID: "5189d2e4-ce9c-4cb8-955a-22b1edde1b70") : secret "default-cloud1-sens-meter-proxy-tls" not found Jan 26 00:26:08 crc kubenswrapper[4975]: I0126 00:26:08.562991 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hw4lv\" (UniqueName: \"kubernetes.io/projected/5189d2e4-ce9c-4cb8-955a-22b1edde1b70-kube-api-access-hw4lv\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s\" (UID: \"5189d2e4-ce9c-4cb8-955a-22b1edde1b70\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" Jan 26 00:26:08 crc kubenswrapper[4975]: I0126 00:26:08.570587 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/5189d2e4-ce9c-4cb8-955a-22b1edde1b70-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s\" (UID: \"5189d2e4-ce9c-4cb8-955a-22b1edde1b70\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" Jan 26 00:26:09 crc kubenswrapper[4975]: I0126 00:26:09.063534 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/5189d2e4-ce9c-4cb8-955a-22b1edde1b70-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s\" (UID: \"5189d2e4-ce9c-4cb8-955a-22b1edde1b70\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" Jan 26 00:26:09 crc kubenswrapper[4975]: E0126 00:26:09.063828 4975 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-sens-meter-proxy-tls: secret "default-cloud1-sens-meter-proxy-tls" not found Jan 26 00:26:09 crc kubenswrapper[4975]: E0126 00:26:09.063910 4975 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5189d2e4-ce9c-4cb8-955a-22b1edde1b70-default-cloud1-sens-meter-proxy-tls podName:5189d2e4-ce9c-4cb8-955a-22b1edde1b70 nodeName:}" failed. No retries permitted until 2026-01-26 00:26:10.063886749 +0000 UTC m=+1154.185092243 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "default-cloud1-sens-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/5189d2e4-ce9c-4cb8-955a-22b1edde1b70-default-cloud1-sens-meter-proxy-tls") pod "default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" (UID: "5189d2e4-ce9c-4cb8-955a-22b1edde1b70") : secret "default-cloud1-sens-meter-proxy-tls" not found Jan 26 00:26:10 crc kubenswrapper[4975]: I0126 00:26:10.073267 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/5189d2e4-ce9c-4cb8-955a-22b1edde1b70-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s\" (UID: \"5189d2e4-ce9c-4cb8-955a-22b1edde1b70\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" Jan 26 00:26:10 crc kubenswrapper[4975]: I0126 00:26:10.087545 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/5189d2e4-ce9c-4cb8-955a-22b1edde1b70-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s\" (UID: \"5189d2e4-ce9c-4cb8-955a-22b1edde1b70\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" Jan 26 00:26:10 crc kubenswrapper[4975]: I0126 00:26:10.307906 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" Jan 26 00:26:10 crc kubenswrapper[4975]: I0126 00:26:10.481717 4975 patch_prober.go:28] interesting pod/machine-config-daemon-f42fk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 00:26:10 crc kubenswrapper[4975]: I0126 00:26:10.481844 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 00:26:10 crc kubenswrapper[4975]: I0126 00:26:10.481926 4975 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" Jan 26 00:26:10 crc kubenswrapper[4975]: I0126 00:26:10.482437 4975 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2ec9837c4f02f5e5de7675625d8c048c082c725779a8095fa603812c7a7ca1e8"} pod="openshift-machine-config-operator/machine-config-daemon-f42fk" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 26 00:26:10 crc kubenswrapper[4975]: I0126 00:26:10.482493 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" containerID="cri-o://2ec9837c4f02f5e5de7675625d8c048c082c725779a8095fa603812c7a7ca1e8" gracePeriod=600 Jan 26 00:26:11 crc kubenswrapper[4975]: I0126 00:26:11.097382 4975 generic.go:334] "Generic (PLEG): container finished" podID="b76c31fb-14ea-4b49-8a41-0b2731967b86" 
containerID="2ec9837c4f02f5e5de7675625d8c048c082c725779a8095fa603812c7a7ca1e8" exitCode=0 Jan 26 00:26:11 crc kubenswrapper[4975]: I0126 00:26:11.099460 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" event={"ID":"b76c31fb-14ea-4b49-8a41-0b2731967b86","Type":"ContainerDied","Data":"2ec9837c4f02f5e5de7675625d8c048c082c725779a8095fa603812c7a7ca1e8"} Jan 26 00:26:11 crc kubenswrapper[4975]: I0126 00:26:11.099772 4975 scope.go:117] "RemoveContainer" containerID="1ff2c84d111b05e92d29b652399f7326ae273accdab83024597003719d8b7515" Jan 26 00:26:13 crc kubenswrapper[4975]: E0126 00:26:13.606101 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"prometheus\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="service-telemetry/prometheus-default-0" podUID="a705e25a-ff6d-4954-8c70-ae6d973c3d0e" Jan 26 00:26:13 crc kubenswrapper[4975]: I0126 00:26:13.614857 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg"] Jan 26 00:26:13 crc kubenswrapper[4975]: I0126 00:26:13.653802 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s"] Jan 26 00:26:13 crc kubenswrapper[4975]: W0126 00:26:13.659271 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5189d2e4_ce9c_4cb8_955a_22b1edde1b70.slice/crio-04aec40a866fdf177941f4e51047b778b220070069fd9d07f145c2507d8b1345 WatchSource:0}: Error finding container 04aec40a866fdf177941f4e51047b778b220070069fd9d07f145c2507d8b1345: Status 404 returned error can't find the container with id 04aec40a866fdf177941f4e51047b778b220070069fd9d07f145c2507d8b1345 Jan 26 00:26:14 crc kubenswrapper[4975]: I0126 00:26:14.247895 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" event={"ID":"26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529","Type":"ContainerStarted","Data":"e30b91d168c9637171bb28e612e5b862dc84c93ad0d0ba2b72ff06705062dd0e"} Jan 26 00:26:14 crc kubenswrapper[4975]: I0126 00:26:14.249845 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" event={"ID":"b76c31fb-14ea-4b49-8a41-0b2731967b86","Type":"ContainerStarted","Data":"950074c352f727e22ecb9292041c52e5cee133377c9a49581baee4c6166e42f5"} Jan 26 00:26:14 crc kubenswrapper[4975]: I0126 00:26:14.257194 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"a705e25a-ff6d-4954-8c70-ae6d973c3d0e","Type":"ContainerStarted","Data":"efdd66402125b26e12d48ac3ce58c319879732cb0364ab6ad0023031cacd05a0"} Jan 26 00:26:14 crc kubenswrapper[4975]: I0126 00:26:14.268869 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" event={"ID":"5189d2e4-ce9c-4cb8-955a-22b1edde1b70","Type":"ContainerStarted","Data":"04aec40a866fdf177941f4e51047b778b220070069fd9d07f145c2507d8b1345"} Jan 26 00:26:14 crc kubenswrapper[4975]: I0126 00:26:14.279440 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" 
event={"ID":"0d7d25f6-4c23-4290-a03c-73691e6237a4","Type":"ContainerStarted","Data":"30b9744500c823b99be9e9e9e63473e943e052af8b9b08cb4a01f35b21d8f623"} Jan 26 00:26:15 crc kubenswrapper[4975]: I0126 00:26:15.289450 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" event={"ID":"5189d2e4-ce9c-4cb8-955a-22b1edde1b70","Type":"ContainerStarted","Data":"06d1cac14f3163c2f6ff649ad1bb26f2cdcf12db446cfa75fc3be1d6fedc363d"} Jan 26 00:26:15 crc kubenswrapper[4975]: I0126 00:26:15.291360 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" event={"ID":"26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529","Type":"ContainerStarted","Data":"cb6b6d91a6c123ce4a4ed58b28b3832338320a661353fb22150174345738da6e"} Jan 26 00:26:16 crc kubenswrapper[4975]: I0126 00:26:16.301524 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-nt9q7" event={"ID":"cc95813a-9cba-437a-bce6-98252790efed","Type":"ContainerStarted","Data":"762104827efa167d1f55665fb45e3e03c47eb08556f3d12e884dd1582a6c9dab"} Jan 26 00:26:16 crc kubenswrapper[4975]: I0126 00:26:16.318697 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"a705e25a-ff6d-4954-8c70-ae6d973c3d0e","Type":"ContainerStarted","Data":"0db5170e209fb7d35b45c7ea6fc2a44ceacbc8917590b579859fdfe6dd16f2c6"} Jan 26 00:26:16 crc kubenswrapper[4975]: I0126 00:26:16.325332 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-nt9q7" podStartSLOduration=1.833438853 podStartE2EDuration="47.325296665s" podCreationTimestamp="2026-01-26 00:25:29 +0000 UTC" firstStartedPulling="2026-01-26 00:25:30.34038177 +0000 UTC m=+1114.461587264" lastFinishedPulling="2026-01-26 00:26:15.832239592 +0000 UTC m=+1159.953445076" observedRunningTime="2026-01-26 00:26:16.317415313 +0000 UTC m=+1160.438620807" watchObservedRunningTime="2026-01-26 00:26:16.325296665 +0000 UTC m=+1160.446502159" Jan 26 00:26:16 crc kubenswrapper[4975]: I0126 00:26:16.353171 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/prometheus-default-0" podStartSLOduration=4.6081444000000005 podStartE2EDuration="59.353154072s" podCreationTimestamp="2026-01-26 00:25:17 +0000 UTC" firstStartedPulling="2026-01-26 00:25:21.200416706 +0000 UTC m=+1105.321622200" lastFinishedPulling="2026-01-26 00:26:15.945426378 +0000 UTC m=+1160.066631872" observedRunningTime="2026-01-26 00:26:16.350569929 +0000 UTC m=+1160.471775463" watchObservedRunningTime="2026-01-26 00:26:16.353154072 +0000 UTC m=+1160.474359566" Jan 26 00:26:17 crc kubenswrapper[4975]: I0126 00:26:17.661545 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7"] Jan 26 00:26:17 crc kubenswrapper[4975]: I0126 00:26:17.663010 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7" Jan 26 00:26:17 crc kubenswrapper[4975]: I0126 00:26:17.670090 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7"] Jan 26 00:26:17 crc kubenswrapper[4975]: I0126 00:26:17.670620 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-coll-event-sg-core-configmap" Jan 26 00:26:17 crc kubenswrapper[4975]: I0126 00:26:17.670779 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-cert" Jan 26 00:26:17 crc kubenswrapper[4975]: I0126 00:26:17.763493 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/90b625d1-1889-4c96-a8ea-1b5f60915c53-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7\" (UID: \"90b625d1-1889-4c96-a8ea-1b5f60915c53\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7" Jan 26 00:26:17 crc kubenswrapper[4975]: I0126 00:26:17.763560 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/90b625d1-1889-4c96-a8ea-1b5f60915c53-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7\" (UID: \"90b625d1-1889-4c96-a8ea-1b5f60915c53\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7" Jan 26 00:26:17 crc kubenswrapper[4975]: I0126 00:26:17.763601 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/90b625d1-1889-4c96-a8ea-1b5f60915c53-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7\" (UID: \"90b625d1-1889-4c96-a8ea-1b5f60915c53\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7" Jan 26 00:26:17 crc kubenswrapper[4975]: I0126 00:26:17.764016 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmwkg\" (UniqueName: \"kubernetes.io/projected/90b625d1-1889-4c96-a8ea-1b5f60915c53-kube-api-access-mmwkg\") pod \"default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7\" (UID: \"90b625d1-1889-4c96-a8ea-1b5f60915c53\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7" Jan 26 00:26:17 crc kubenswrapper[4975]: I0126 00:26:17.865319 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmwkg\" (UniqueName: \"kubernetes.io/projected/90b625d1-1889-4c96-a8ea-1b5f60915c53-kube-api-access-mmwkg\") pod \"default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7\" (UID: \"90b625d1-1889-4c96-a8ea-1b5f60915c53\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7" Jan 26 00:26:17 crc kubenswrapper[4975]: I0126 00:26:17.865426 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/90b625d1-1889-4c96-a8ea-1b5f60915c53-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7\" (UID: \"90b625d1-1889-4c96-a8ea-1b5f60915c53\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7" Jan 26 00:26:17 crc kubenswrapper[4975]: I0126 00:26:17.865461 4975 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/90b625d1-1889-4c96-a8ea-1b5f60915c53-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7\" (UID: \"90b625d1-1889-4c96-a8ea-1b5f60915c53\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7" Jan 26 00:26:17 crc kubenswrapper[4975]: I0126 00:26:17.865510 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/90b625d1-1889-4c96-a8ea-1b5f60915c53-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7\" (UID: \"90b625d1-1889-4c96-a8ea-1b5f60915c53\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7" Jan 26 00:26:17 crc kubenswrapper[4975]: I0126 00:26:17.866135 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/90b625d1-1889-4c96-a8ea-1b5f60915c53-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7\" (UID: \"90b625d1-1889-4c96-a8ea-1b5f60915c53\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7" Jan 26 00:26:17 crc kubenswrapper[4975]: I0126 00:26:17.866903 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/90b625d1-1889-4c96-a8ea-1b5f60915c53-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7\" (UID: \"90b625d1-1889-4c96-a8ea-1b5f60915c53\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7" Jan 26 00:26:17 crc kubenswrapper[4975]: I0126 00:26:17.883705 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/90b625d1-1889-4c96-a8ea-1b5f60915c53-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7\" (UID: \"90b625d1-1889-4c96-a8ea-1b5f60915c53\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7" Jan 26 00:26:17 crc kubenswrapper[4975]: I0126 00:26:17.883761 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmwkg\" (UniqueName: \"kubernetes.io/projected/90b625d1-1889-4c96-a8ea-1b5f60915c53-kube-api-access-mmwkg\") pod \"default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7\" (UID: \"90b625d1-1889-4c96-a8ea-1b5f60915c53\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7" Jan 26 00:26:17 crc kubenswrapper[4975]: I0126 00:26:17.990014 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7" Jan 26 00:26:18 crc kubenswrapper[4975]: I0126 00:26:18.706657 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7"] Jan 26 00:26:19 crc kubenswrapper[4975]: I0126 00:26:19.691919 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq"] Jan 26 00:26:19 crc kubenswrapper[4975]: I0126 00:26:19.693061 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq" Jan 26 00:26:19 crc kubenswrapper[4975]: I0126 00:26:19.696177 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-ceil-event-sg-core-configmap" Jan 26 00:26:19 crc kubenswrapper[4975]: I0126 00:26:19.712668 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq"] Jan 26 00:26:19 crc kubenswrapper[4975]: I0126 00:26:19.864909 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/51e86811-c442-451e-b8b3-1680edf110e4-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq\" (UID: \"51e86811-c442-451e-b8b3-1680edf110e4\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq" Jan 26 00:26:19 crc kubenswrapper[4975]: I0126 00:26:19.865030 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/51e86811-c442-451e-b8b3-1680edf110e4-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq\" (UID: \"51e86811-c442-451e-b8b3-1680edf110e4\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq" Jan 26 00:26:19 crc kubenswrapper[4975]: I0126 00:26:19.865077 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zxd9\" (UniqueName: \"kubernetes.io/projected/51e86811-c442-451e-b8b3-1680edf110e4-kube-api-access-2zxd9\") pod \"default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq\" (UID: \"51e86811-c442-451e-b8b3-1680edf110e4\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq" Jan 26 00:26:19 crc kubenswrapper[4975]: I0126 00:26:19.865195 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/51e86811-c442-451e-b8b3-1680edf110e4-socket-dir\") pod \"default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq\" (UID: \"51e86811-c442-451e-b8b3-1680edf110e4\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq" Jan 26 00:26:19 crc kubenswrapper[4975]: I0126 00:26:19.966224 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/51e86811-c442-451e-b8b3-1680edf110e4-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq\" (UID: \"51e86811-c442-451e-b8b3-1680edf110e4\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq" Jan 26 00:26:19 crc kubenswrapper[4975]: I0126 00:26:19.966277 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zxd9\" (UniqueName: \"kubernetes.io/projected/51e86811-c442-451e-b8b3-1680edf110e4-kube-api-access-2zxd9\") pod \"default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq\" (UID: \"51e86811-c442-451e-b8b3-1680edf110e4\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq" Jan 26 00:26:19 crc kubenswrapper[4975]: I0126 00:26:19.966335 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/51e86811-c442-451e-b8b3-1680edf110e4-socket-dir\") pod 
\"default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq\" (UID: \"51e86811-c442-451e-b8b3-1680edf110e4\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq" Jan 26 00:26:19 crc kubenswrapper[4975]: I0126 00:26:19.966387 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/51e86811-c442-451e-b8b3-1680edf110e4-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq\" (UID: \"51e86811-c442-451e-b8b3-1680edf110e4\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq" Jan 26 00:26:19 crc kubenswrapper[4975]: I0126 00:26:19.967044 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/51e86811-c442-451e-b8b3-1680edf110e4-socket-dir\") pod \"default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq\" (UID: \"51e86811-c442-451e-b8b3-1680edf110e4\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq" Jan 26 00:26:19 crc kubenswrapper[4975]: I0126 00:26:19.967289 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/51e86811-c442-451e-b8b3-1680edf110e4-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq\" (UID: \"51e86811-c442-451e-b8b3-1680edf110e4\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq" Jan 26 00:26:19 crc kubenswrapper[4975]: I0126 00:26:19.984975 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zxd9\" (UniqueName: \"kubernetes.io/projected/51e86811-c442-451e-b8b3-1680edf110e4-kube-api-access-2zxd9\") pod \"default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq\" (UID: \"51e86811-c442-451e-b8b3-1680edf110e4\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq" Jan 26 00:26:19 crc kubenswrapper[4975]: I0126 00:26:19.988239 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/51e86811-c442-451e-b8b3-1680edf110e4-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq\" (UID: \"51e86811-c442-451e-b8b3-1680edf110e4\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq" Jan 26 00:26:20 crc kubenswrapper[4975]: I0126 00:26:20.015896 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq" Jan 26 00:26:20 crc kubenswrapper[4975]: I0126 00:26:20.429337 4975 generic.go:334] "Generic (PLEG): container finished" podID="2ca91482-436e-48bf-8d3b-256ab164a837" containerID="e25a412745e8f51935dfa1b76175c032b3a503a4cc1f967455113f1b8eb5bde9" exitCode=0 Jan 26 00:26:20 crc kubenswrapper[4975]: I0126 00:26:20.429408 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"2ca91482-436e-48bf-8d3b-256ab164a837","Type":"ContainerDied","Data":"e25a412745e8f51935dfa1b76175c032b3a503a4cc1f967455113f1b8eb5bde9"} Jan 26 00:26:20 crc kubenswrapper[4975]: I0126 00:26:20.927367 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/prometheus-default-0" Jan 26 00:26:20 crc kubenswrapper[4975]: I0126 00:26:20.927947 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/prometheus-default-0" Jan 26 00:26:20 crc kubenswrapper[4975]: I0126 00:26:20.968950 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/prometheus-default-0" Jan 26 00:26:21 crc kubenswrapper[4975]: I0126 00:26:21.494957 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/prometheus-default-0" Jan 26 00:26:25 crc kubenswrapper[4975]: I0126 00:26:25.402447 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq"] Jan 26 00:26:25 crc kubenswrapper[4975]: W0126 00:26:25.414053 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod51e86811_c442_451e_b8b3_1680edf110e4.slice/crio-52202f27a46c8a42a988cbffcdb78cdb4d99b8fe6a6ae27792aa4b20754c533f WatchSource:0}: Error finding container 52202f27a46c8a42a988cbffcdb78cdb4d99b8fe6a6ae27792aa4b20754c533f: Status 404 returned error can't find the container with id 52202f27a46c8a42a988cbffcdb78cdb4d99b8fe6a6ae27792aa4b20754c533f Jan 26 00:26:25 crc kubenswrapper[4975]: I0126 00:26:25.493976 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" event={"ID":"26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529","Type":"ContainerStarted","Data":"754ca9ba1b548e98d74f1f41cc223829a81c7c38d37876685436e63a28bfdde8"} Jan 26 00:26:25 crc kubenswrapper[4975]: I0126 00:26:25.497028 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq" event={"ID":"51e86811-c442-451e-b8b3-1680edf110e4","Type":"ContainerStarted","Data":"52202f27a46c8a42a988cbffcdb78cdb4d99b8fe6a6ae27792aa4b20754c533f"} Jan 26 00:26:25 crc kubenswrapper[4975]: I0126 00:26:25.498956 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" event={"ID":"5189d2e4-ce9c-4cb8-955a-22b1edde1b70","Type":"ContainerStarted","Data":"646e14dcbf10b2688d6e44c11907d9a3c22634753ce3e68966d8be3fe7cec213"} Jan 26 00:26:25 crc kubenswrapper[4975]: I0126 00:26:25.501515 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" event={"ID":"0d7d25f6-4c23-4290-a03c-73691e6237a4","Type":"ContainerStarted","Data":"de2868bbaeab4c51e4628a644966919fbd0c4d1233ebd51018b9f47f0218b67d"} Jan 26 00:26:25 
crc kubenswrapper[4975]: I0126 00:26:25.502653 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7" event={"ID":"90b625d1-1889-4c96-a8ea-1b5f60915c53","Type":"ContainerStarted","Data":"8a402716c5fc389928e160660a86efead267641c09b7a422d548d1201bfdcdc1"} Jan 26 00:26:25 crc kubenswrapper[4975]: I0126 00:26:25.502695 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7" event={"ID":"90b625d1-1889-4c96-a8ea-1b5f60915c53","Type":"ContainerStarted","Data":"f0ac72ea0dfa7942b1c2a127bb528614af3e67a97c9cf10c3f4498200568f8d8"} Jan 26 00:26:28 crc kubenswrapper[4975]: I0126 00:26:28.541781 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"2ca91482-436e-48bf-8d3b-256ab164a837","Type":"ContainerStarted","Data":"4018354db716bd364be0ca972b41a020418b984f2408b0e518160dc41eac3936"} Jan 26 00:26:28 crc kubenswrapper[4975]: I0126 00:26:28.548586 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq" event={"ID":"51e86811-c442-451e-b8b3-1680edf110e4","Type":"ContainerStarted","Data":"357c79f33f446bea74374c2f2cfe3c3e9a5ae14b815b3d77b4a4e238c3d358b4"} Jan 26 00:26:38 crc kubenswrapper[4975]: I0126 00:26:38.665653 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-xmfhs"] Jan 26 00:26:38 crc kubenswrapper[4975]: I0126 00:26:38.666910 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" podUID="c6d31f14-eac0-4ccb-9cf1-a03d729d18b2" containerName="default-interconnect" containerID="cri-o://6aa373af24837e726132b1429f88162c40c4d5ba919c5862da2afa8c86d6e2a8" gracePeriod=30 Jan 26 00:26:41 crc kubenswrapper[4975]: I0126 00:26:40.342648 4975 generic.go:334] "Generic (PLEG): container finished" podID="90b625d1-1889-4c96-a8ea-1b5f60915c53" containerID="8a402716c5fc389928e160660a86efead267641c09b7a422d548d1201bfdcdc1" exitCode=0 Jan 26 00:26:41 crc kubenswrapper[4975]: I0126 00:26:40.342887 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7" event={"ID":"90b625d1-1889-4c96-a8ea-1b5f60915c53","Type":"ContainerDied","Data":"8a402716c5fc389928e160660a86efead267641c09b7a422d548d1201bfdcdc1"} Jan 26 00:26:41 crc kubenswrapper[4975]: I0126 00:26:40.345236 4975 generic.go:334] "Generic (PLEG): container finished" podID="26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529" containerID="754ca9ba1b548e98d74f1f41cc223829a81c7c38d37876685436e63a28bfdde8" exitCode=0 Jan 26 00:26:41 crc kubenswrapper[4975]: I0126 00:26:40.345312 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" event={"ID":"26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529","Type":"ContainerDied","Data":"754ca9ba1b548e98d74f1f41cc223829a81c7c38d37876685436e63a28bfdde8"} Jan 26 00:26:41 crc kubenswrapper[4975]: I0126 00:26:40.346823 4975 generic.go:334] "Generic (PLEG): container finished" podID="51e86811-c442-451e-b8b3-1680edf110e4" containerID="357c79f33f446bea74374c2f2cfe3c3e9a5ae14b815b3d77b4a4e238c3d358b4" exitCode=0 Jan 26 00:26:41 crc kubenswrapper[4975]: I0126 00:26:40.346899 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq" event={"ID":"51e86811-c442-451e-b8b3-1680edf110e4","Type":"ContainerDied","Data":"357c79f33f446bea74374c2f2cfe3c3e9a5ae14b815b3d77b4a4e238c3d358b4"} Jan 26 00:26:41 crc kubenswrapper[4975]: I0126 00:26:40.349526 4975 generic.go:334] "Generic (PLEG): container finished" podID="5189d2e4-ce9c-4cb8-955a-22b1edde1b70" containerID="646e14dcbf10b2688d6e44c11907d9a3c22634753ce3e68966d8be3fe7cec213" exitCode=0 Jan 26 00:26:41 crc kubenswrapper[4975]: I0126 00:26:40.349581 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" event={"ID":"5189d2e4-ce9c-4cb8-955a-22b1edde1b70","Type":"ContainerDied","Data":"646e14dcbf10b2688d6e44c11907d9a3c22634753ce3e68966d8be3fe7cec213"} Jan 26 00:26:41 crc kubenswrapper[4975]: I0126 00:26:40.352429 4975 generic.go:334] "Generic (PLEG): container finished" podID="0d7d25f6-4c23-4290-a03c-73691e6237a4" containerID="de2868bbaeab4c51e4628a644966919fbd0c4d1233ebd51018b9f47f0218b67d" exitCode=0 Jan 26 00:26:41 crc kubenswrapper[4975]: I0126 00:26:40.352475 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" event={"ID":"0d7d25f6-4c23-4290-a03c-73691e6237a4","Type":"ContainerDied","Data":"de2868bbaeab4c51e4628a644966919fbd0c4d1233ebd51018b9f47f0218b67d"} Jan 26 00:26:41 crc kubenswrapper[4975]: I0126 00:26:40.354359 4975 generic.go:334] "Generic (PLEG): container finished" podID="c6d31f14-eac0-4ccb-9cf1-a03d729d18b2" containerID="6aa373af24837e726132b1429f88162c40c4d5ba919c5862da2afa8c86d6e2a8" exitCode=0 Jan 26 00:26:41 crc kubenswrapper[4975]: I0126 00:26:40.354395 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" event={"ID":"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2","Type":"ContainerDied","Data":"6aa373af24837e726132b1429f88162c40c4d5ba919c5862da2afa8c86d6e2a8"} Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.785975 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" Jan 26 00:26:43 crc kubenswrapper[4975]: E0126 00:26:43.791678 4975 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/infrawatch/sg-core:latest" Jan 26 00:26:43 crc kubenswrapper[4975]: E0126 00:26:43.791868 4975 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:sg-core,Image:quay.io/infrawatch/sg-core:latest,Command:[],Args:[-config /etc/sg-core/sg-core.conf.yaml],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:prom-https,HostPort:0,ContainerPort:8083,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:MY_POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:socket-dir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:sg-core-config,ReadOnly:true,MountPath:/etc/sg-core/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vt6hv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg_service-telemetry(26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 26 00:26:43 crc kubenswrapper[4975]: E0126 00:26:43.795348 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" podUID="26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529" Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.813972 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-default-interconnect-inter-router-credentials\") pod \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.814156 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-default-interconnect-openstack-credentials\") pod \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " Jan 26 00:26:43 crc 
kubenswrapper[4975]: I0126 00:26:43.814210 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-sasl-users\") pod \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.814228 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-default-interconnect-inter-router-ca\") pod \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.814320 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w6tdn\" (UniqueName: \"kubernetes.io/projected/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-kube-api-access-w6tdn\") pod \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.814351 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-sasl-config\") pod \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.814403 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-default-interconnect-openstack-ca\") pod \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\" (UID: \"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2\") " Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.823230 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-sasl-config" (OuterVolumeSpecName: "sasl-config") pod "c6d31f14-eac0-4ccb-9cf1-a03d729d18b2" (UID: "c6d31f14-eac0-4ccb-9cf1-a03d729d18b2"). InnerVolumeSpecName "sasl-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.832770 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-default-interconnect-inter-router-ca" (OuterVolumeSpecName: "default-interconnect-inter-router-ca") pod "c6d31f14-eac0-4ccb-9cf1-a03d729d18b2" (UID: "c6d31f14-eac0-4ccb-9cf1-a03d729d18b2"). InnerVolumeSpecName "default-interconnect-inter-router-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.842138 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-default-interconnect-openstack-ca" (OuterVolumeSpecName: "default-interconnect-openstack-ca") pod "c6d31f14-eac0-4ccb-9cf1-a03d729d18b2" (UID: "c6d31f14-eac0-4ccb-9cf1-a03d729d18b2"). InnerVolumeSpecName "default-interconnect-openstack-ca". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.854252 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-sasl-users" (OuterVolumeSpecName: "sasl-users") pod "c6d31f14-eac0-4ccb-9cf1-a03d729d18b2" (UID: "c6d31f14-eac0-4ccb-9cf1-a03d729d18b2"). InnerVolumeSpecName "sasl-users". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.854352 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-nkbzp"] Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.854921 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-default-interconnect-openstack-credentials" (OuterVolumeSpecName: "default-interconnect-openstack-credentials") pod "c6d31f14-eac0-4ccb-9cf1-a03d729d18b2" (UID: "c6d31f14-eac0-4ccb-9cf1-a03d729d18b2"). InnerVolumeSpecName "default-interconnect-openstack-credentials". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:26:43 crc kubenswrapper[4975]: E0126 00:26:43.854965 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6d31f14-eac0-4ccb-9cf1-a03d729d18b2" containerName="default-interconnect" Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.855081 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6d31f14-eac0-4ccb-9cf1-a03d729d18b2" containerName="default-interconnect" Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.855490 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-kube-api-access-w6tdn" (OuterVolumeSpecName: "kube-api-access-w6tdn") pod "c6d31f14-eac0-4ccb-9cf1-a03d729d18b2" (UID: "c6d31f14-eac0-4ccb-9cf1-a03d729d18b2"). InnerVolumeSpecName "kube-api-access-w6tdn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.855548 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6d31f14-eac0-4ccb-9cf1-a03d729d18b2" containerName="default-interconnect" Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.856525 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.858882 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-default-interconnect-inter-router-credentials" (OuterVolumeSpecName: "default-interconnect-inter-router-credentials") pod "c6d31f14-eac0-4ccb-9cf1-a03d729d18b2" (UID: "c6d31f14-eac0-4ccb-9cf1-a03d729d18b2"). InnerVolumeSpecName "default-interconnect-inter-router-credentials". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.881285 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-nkbzp"] Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.916113 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/a6053679-c924-4974-a98a-1e7e7a333e4d-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-nkbzp\" (UID: \"a6053679-c924-4974-a98a-1e7e7a333e4d\") " pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.916201 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/a6053679-c924-4974-a98a-1e7e7a333e4d-sasl-config\") pod \"default-interconnect-68864d46cb-nkbzp\" (UID: \"a6053679-c924-4974-a98a-1e7e7a333e4d\") " pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.916254 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/a6053679-c924-4974-a98a-1e7e7a333e4d-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-nkbzp\" (UID: \"a6053679-c924-4974-a98a-1e7e7a333e4d\") " pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.916289 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/a6053679-c924-4974-a98a-1e7e7a333e4d-sasl-users\") pod \"default-interconnect-68864d46cb-nkbzp\" (UID: \"a6053679-c924-4974-a98a-1e7e7a333e4d\") " pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.916317 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/a6053679-c924-4974-a98a-1e7e7a333e4d-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-nkbzp\" (UID: \"a6053679-c924-4974-a98a-1e7e7a333e4d\") " pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.916356 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/a6053679-c924-4974-a98a-1e7e7a333e4d-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-nkbzp\" (UID: \"a6053679-c924-4974-a98a-1e7e7a333e4d\") " pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.916383 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29q44\" (UniqueName: \"kubernetes.io/projected/a6053679-c924-4974-a98a-1e7e7a333e4d-kube-api-access-29q44\") pod \"default-interconnect-68864d46cb-nkbzp\" (UID: \"a6053679-c924-4974-a98a-1e7e7a333e4d\") " pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.916462 4975 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-w6tdn\" (UniqueName: \"kubernetes.io/projected/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-kube-api-access-w6tdn\") on node \"crc\" DevicePath \"\"" Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.916481 4975 reconciler_common.go:293] "Volume detached for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-sasl-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.916547 4975 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-default-interconnect-openstack-ca\") on node \"crc\" DevicePath \"\"" Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.916562 4975 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-default-interconnect-inter-router-credentials\") on node \"crc\" DevicePath \"\"" Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.916577 4975 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-default-interconnect-openstack-credentials\") on node \"crc\" DevicePath \"\"" Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.916592 4975 reconciler_common.go:293] "Volume detached for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-sasl-users\") on node \"crc\" DevicePath \"\"" Jan 26 00:26:43 crc kubenswrapper[4975]: I0126 00:26:43.916608 4975 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2-default-interconnect-inter-router-ca\") on node \"crc\" DevicePath \"\"" Jan 26 00:26:44 crc kubenswrapper[4975]: I0126 00:26:44.017095 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/a6053679-c924-4974-a98a-1e7e7a333e4d-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-nkbzp\" (UID: \"a6053679-c924-4974-a98a-1e7e7a333e4d\") " pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" Jan 26 00:26:44 crc kubenswrapper[4975]: I0126 00:26:44.017146 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/a6053679-c924-4974-a98a-1e7e7a333e4d-sasl-config\") pod \"default-interconnect-68864d46cb-nkbzp\" (UID: \"a6053679-c924-4974-a98a-1e7e7a333e4d\") " pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" Jan 26 00:26:44 crc kubenswrapper[4975]: I0126 00:26:44.017191 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/a6053679-c924-4974-a98a-1e7e7a333e4d-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-nkbzp\" (UID: \"a6053679-c924-4974-a98a-1e7e7a333e4d\") " pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" Jan 26 00:26:44 crc kubenswrapper[4975]: I0126 00:26:44.017227 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: 
\"kubernetes.io/secret/a6053679-c924-4974-a98a-1e7e7a333e4d-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-nkbzp\" (UID: \"a6053679-c924-4974-a98a-1e7e7a333e4d\") " pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" Jan 26 00:26:44 crc kubenswrapper[4975]: I0126 00:26:44.017252 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/a6053679-c924-4974-a98a-1e7e7a333e4d-sasl-users\") pod \"default-interconnect-68864d46cb-nkbzp\" (UID: \"a6053679-c924-4974-a98a-1e7e7a333e4d\") " pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" Jan 26 00:26:44 crc kubenswrapper[4975]: I0126 00:26:44.017284 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/a6053679-c924-4974-a98a-1e7e7a333e4d-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-nkbzp\" (UID: \"a6053679-c924-4974-a98a-1e7e7a333e4d\") " pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" Jan 26 00:26:44 crc kubenswrapper[4975]: I0126 00:26:44.017308 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29q44\" (UniqueName: \"kubernetes.io/projected/a6053679-c924-4974-a98a-1e7e7a333e4d-kube-api-access-29q44\") pod \"default-interconnect-68864d46cb-nkbzp\" (UID: \"a6053679-c924-4974-a98a-1e7e7a333e4d\") " pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" Jan 26 00:26:44 crc kubenswrapper[4975]: I0126 00:26:44.020386 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/a6053679-c924-4974-a98a-1e7e7a333e4d-sasl-config\") pod \"default-interconnect-68864d46cb-nkbzp\" (UID: \"a6053679-c924-4974-a98a-1e7e7a333e4d\") " pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" Jan 26 00:26:44 crc kubenswrapper[4975]: I0126 00:26:44.024525 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/a6053679-c924-4974-a98a-1e7e7a333e4d-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-nkbzp\" (UID: \"a6053679-c924-4974-a98a-1e7e7a333e4d\") " pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" Jan 26 00:26:44 crc kubenswrapper[4975]: I0126 00:26:44.024525 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/a6053679-c924-4974-a98a-1e7e7a333e4d-sasl-users\") pod \"default-interconnect-68864d46cb-nkbzp\" (UID: \"a6053679-c924-4974-a98a-1e7e7a333e4d\") " pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" Jan 26 00:26:44 crc kubenswrapper[4975]: I0126 00:26:44.024534 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/a6053679-c924-4974-a98a-1e7e7a333e4d-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-nkbzp\" (UID: \"a6053679-c924-4974-a98a-1e7e7a333e4d\") " pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" Jan 26 00:26:44 crc kubenswrapper[4975]: I0126 00:26:44.025813 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/a6053679-c924-4974-a98a-1e7e7a333e4d-default-interconnect-inter-router-ca\") pod 
\"default-interconnect-68864d46cb-nkbzp\" (UID: \"a6053679-c924-4974-a98a-1e7e7a333e4d\") " pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" Jan 26 00:26:44 crc kubenswrapper[4975]: I0126 00:26:44.027985 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/a6053679-c924-4974-a98a-1e7e7a333e4d-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-nkbzp\" (UID: \"a6053679-c924-4974-a98a-1e7e7a333e4d\") " pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" Jan 26 00:26:44 crc kubenswrapper[4975]: I0126 00:26:44.037002 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29q44\" (UniqueName: \"kubernetes.io/projected/a6053679-c924-4974-a98a-1e7e7a333e4d-kube-api-access-29q44\") pod \"default-interconnect-68864d46cb-nkbzp\" (UID: \"a6053679-c924-4974-a98a-1e7e7a333e4d\") " pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" Jan 26 00:26:44 crc kubenswrapper[4975]: I0126 00:26:44.211146 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" Jan 26 00:26:44 crc kubenswrapper[4975]: I0126 00:26:44.391498 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" event={"ID":"c6d31f14-eac0-4ccb-9cf1-a03d729d18b2","Type":"ContainerDied","Data":"ec75653d28dc0033cd1f184fd87e119088e1cb030cf7741f456a4d7ca7b8e637"} Jan 26 00:26:44 crc kubenswrapper[4975]: I0126 00:26:44.392015 4975 scope.go:117] "RemoveContainer" containerID="6aa373af24837e726132b1429f88162c40c4d5ba919c5862da2afa8c86d6e2a8" Jan 26 00:26:44 crc kubenswrapper[4975]: I0126 00:26:44.391548 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-xmfhs" Jan 26 00:26:44 crc kubenswrapper[4975]: I0126 00:26:44.392589 4975 scope.go:117] "RemoveContainer" containerID="754ca9ba1b548e98d74f1f41cc223829a81c7c38d37876685436e63a28bfdde8" Jan 26 00:26:44 crc kubenswrapper[4975]: I0126 00:26:44.472986 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-nkbzp"] Jan 26 00:26:44 crc kubenswrapper[4975]: I0126 00:26:44.489884 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-xmfhs"] Jan 26 00:26:44 crc kubenswrapper[4975]: I0126 00:26:44.517814 4975 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-xmfhs"] Jan 26 00:26:45 crc kubenswrapper[4975]: E0126 00:26:45.269895 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/infrawatch/sg-core:latest\\\"\"" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" podUID="26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529" Jan 26 00:26:45 crc kubenswrapper[4975]: I0126 00:26:45.400978 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" event={"ID":"0d7d25f6-4c23-4290-a03c-73691e6237a4","Type":"ContainerStarted","Data":"d92bb60151681f0aa0dae1c1be8453aa200bcac4a87f631fcb3da52bbb9d3171"} Jan 26 00:26:45 crc kubenswrapper[4975]: I0126 00:26:45.401839 4975 scope.go:117] "RemoveContainer" containerID="de2868bbaeab4c51e4628a644966919fbd0c4d1233ebd51018b9f47f0218b67d" Jan 26 00:26:45 crc kubenswrapper[4975]: I0126 00:26:45.404108 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7" event={"ID":"90b625d1-1889-4c96-a8ea-1b5f60915c53","Type":"ContainerStarted","Data":"36e7613f4805b9c30eca9ee76d51760a4bdb7f0638451799b144d5921534b464"} Jan 26 00:26:45 crc kubenswrapper[4975]: I0126 00:26:45.404791 4975 scope.go:117] "RemoveContainer" containerID="8a402716c5fc389928e160660a86efead267641c09b7a422d548d1201bfdcdc1" Jan 26 00:26:45 crc kubenswrapper[4975]: I0126 00:26:45.407591 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" event={"ID":"26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529","Type":"ContainerStarted","Data":"cd80d7a01aa9e69559d627844423582d35c4c2ffd7dec60e5bc046ddfc977cb6"} Jan 26 00:26:45 crc kubenswrapper[4975]: I0126 00:26:45.409804 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" event={"ID":"a6053679-c924-4974-a98a-1e7e7a333e4d","Type":"ContainerStarted","Data":"f475f8662926952a40252cd556175f2f66f0cc26e50621f3053ca86e2ed75ab8"} Jan 26 00:26:45 crc kubenswrapper[4975]: I0126 00:26:45.409846 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" event={"ID":"a6053679-c924-4974-a98a-1e7e7a333e4d","Type":"ContainerStarted","Data":"8398e74066881f2cdb4179906966e347edcff378b3ab39d6d3b7bf49c72b6483"} Jan 26 00:26:45 crc kubenswrapper[4975]: E0126 00:26:45.410905 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/infrawatch/sg-core:latest\\\"\"" 
pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" podUID="26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529" Jan 26 00:26:45 crc kubenswrapper[4975]: I0126 00:26:45.415049 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq" event={"ID":"51e86811-c442-451e-b8b3-1680edf110e4","Type":"ContainerStarted","Data":"f8a5c484357704388c0cc29d08c88b5cc8f883c03188b9673761b9eb2bf5ac88"} Jan 26 00:26:45 crc kubenswrapper[4975]: I0126 00:26:45.415721 4975 scope.go:117] "RemoveContainer" containerID="357c79f33f446bea74374c2f2cfe3c3e9a5ae14b815b3d77b4a4e238c3d358b4" Jan 26 00:26:45 crc kubenswrapper[4975]: I0126 00:26:45.427727 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" event={"ID":"5189d2e4-ce9c-4cb8-955a-22b1edde1b70","Type":"ContainerStarted","Data":"31283fc1290ac1e6b94586eed4989250d52cd7765407ef1ff4a7aa7e959ac69d"} Jan 26 00:26:45 crc kubenswrapper[4975]: I0126 00:26:45.428812 4975 scope.go:117] "RemoveContainer" containerID="646e14dcbf10b2688d6e44c11907d9a3c22634753ce3e68966d8be3fe7cec213" Jan 26 00:26:45 crc kubenswrapper[4975]: I0126 00:26:45.557256 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-interconnect-68864d46cb-nkbzp" podStartSLOduration=7.55723007 podStartE2EDuration="7.55723007s" podCreationTimestamp="2026-01-26 00:26:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:26:45.552641698 +0000 UTC m=+1189.673847192" watchObservedRunningTime="2026-01-26 00:26:45.55723007 +0000 UTC m=+1189.678435564" Jan 26 00:26:46 crc kubenswrapper[4975]: I0126 00:26:46.158575 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6d31f14-eac0-4ccb-9cf1-a03d729d18b2" path="/var/lib/kubelet/pods/c6d31f14-eac0-4ccb-9cf1-a03d729d18b2/volumes" Jan 26 00:26:46 crc kubenswrapper[4975]: I0126 00:26:46.442505 4975 generic.go:334] "Generic (PLEG): container finished" podID="26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529" containerID="cd80d7a01aa9e69559d627844423582d35c4c2ffd7dec60e5bc046ddfc977cb6" exitCode=0 Jan 26 00:26:46 crc kubenswrapper[4975]: I0126 00:26:46.442641 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" event={"ID":"26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529","Type":"ContainerDied","Data":"cd80d7a01aa9e69559d627844423582d35c4c2ffd7dec60e5bc046ddfc977cb6"} Jan 26 00:26:46 crc kubenswrapper[4975]: I0126 00:26:46.442713 4975 scope.go:117] "RemoveContainer" containerID="754ca9ba1b548e98d74f1f41cc223829a81c7c38d37876685436e63a28bfdde8" Jan 26 00:26:46 crc kubenswrapper[4975]: I0126 00:26:46.443958 4975 scope.go:117] "RemoveContainer" containerID="cd80d7a01aa9e69559d627844423582d35c4c2ffd7dec60e5bc046ddfc977cb6" Jan 26 00:26:46 crc kubenswrapper[4975]: E0126 00:26:46.448116 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg_service-telemetry(26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529)\", failed to \"StartContainer\" for \"sg-core\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/infrawatch/sg-core:latest\\\"\"]" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" 
podUID="26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529" Jan 26 00:26:46 crc kubenswrapper[4975]: I0126 00:26:46.459411 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq" event={"ID":"51e86811-c442-451e-b8b3-1680edf110e4","Type":"ContainerStarted","Data":"5cca765797ba13013cc4083f4bae2dc7d71e1214621805e75979a18ee058465a"} Jan 26 00:26:46 crc kubenswrapper[4975]: I0126 00:26:46.469816 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" event={"ID":"5189d2e4-ce9c-4cb8-955a-22b1edde1b70","Type":"ContainerStarted","Data":"8425d3a1916c0d0a36700d5bcb6a49430d15871461a43adc5f3fa4618e5b9e6f"} Jan 26 00:26:46 crc kubenswrapper[4975]: I0126 00:26:46.486473 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" event={"ID":"0d7d25f6-4c23-4290-a03c-73691e6237a4","Type":"ContainerStarted","Data":"c04993815f317f0e9ac125ea92854430de99f1e87c13c3309d33973e439be42b"} Jan 26 00:26:46 crc kubenswrapper[4975]: I0126 00:26:46.495621 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7" event={"ID":"90b625d1-1889-4c96-a8ea-1b5f60915c53","Type":"ContainerStarted","Data":"301715e1b4d64066eee1a581472d8f10d5ac80155f9e7f83a355f0d73365a86b"} Jan 26 00:26:46 crc kubenswrapper[4975]: I0126 00:26:46.559508 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq" podStartSLOduration=7.136247615 podStartE2EDuration="27.559481999s" podCreationTimestamp="2026-01-26 00:26:19 +0000 UTC" firstStartedPulling="2026-01-26 00:26:25.417613224 +0000 UTC m=+1169.538818718" lastFinishedPulling="2026-01-26 00:26:45.840847608 +0000 UTC m=+1189.962053102" observedRunningTime="2026-01-26 00:26:46.544420476 +0000 UTC m=+1190.665625970" watchObservedRunningTime="2026-01-26 00:26:46.559481999 +0000 UTC m=+1190.680687513" Jan 26 00:26:46 crc kubenswrapper[4975]: I0126 00:26:46.562068 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" podStartSLOduration=6.425111719 podStartE2EDuration="38.562054133s" podCreationTimestamp="2026-01-26 00:26:08 +0000 UTC" firstStartedPulling="2026-01-26 00:26:13.662548885 +0000 UTC m=+1157.783754379" lastFinishedPulling="2026-01-26 00:26:45.799491299 +0000 UTC m=+1189.920696793" observedRunningTime="2026-01-26 00:26:46.516158243 +0000 UTC m=+1190.637363777" watchObservedRunningTime="2026-01-26 00:26:46.562054133 +0000 UTC m=+1190.683259637" Jan 26 00:26:46 crc kubenswrapper[4975]: I0126 00:26:46.584630 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" podStartSLOduration=3.3725999509999998 podStartE2EDuration="47.584601332s" podCreationTimestamp="2026-01-26 00:25:59 +0000 UTC" firstStartedPulling="2026-01-26 00:26:01.687960158 +0000 UTC m=+1145.809165652" lastFinishedPulling="2026-01-26 00:26:45.899961539 +0000 UTC m=+1190.021167033" observedRunningTime="2026-01-26 00:26:46.578393793 +0000 UTC m=+1190.699599297" watchObservedRunningTime="2026-01-26 00:26:46.584601332 +0000 UTC m=+1190.705806846" Jan 26 00:26:46 crc kubenswrapper[4975]: I0126 00:26:46.615701 4975 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7" podStartSLOduration=8.351048161 podStartE2EDuration="29.615679846s" podCreationTimestamp="2026-01-26 00:26:17 +0000 UTC" firstStartedPulling="2026-01-26 00:26:24.598296198 +0000 UTC m=+1168.719501732" lastFinishedPulling="2026-01-26 00:26:45.862927923 +0000 UTC m=+1189.984133417" observedRunningTime="2026-01-26 00:26:46.611782354 +0000 UTC m=+1190.732987848" watchObservedRunningTime="2026-01-26 00:26:46.615679846 +0000 UTC m=+1190.736885350" Jan 26 00:26:47 crc kubenswrapper[4975]: I0126 00:26:47.513865 4975 generic.go:334] "Generic (PLEG): container finished" podID="51e86811-c442-451e-b8b3-1680edf110e4" containerID="5cca765797ba13013cc4083f4bae2dc7d71e1214621805e75979a18ee058465a" exitCode=0 Jan 26 00:26:47 crc kubenswrapper[4975]: I0126 00:26:47.513984 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq" event={"ID":"51e86811-c442-451e-b8b3-1680edf110e4","Type":"ContainerDied","Data":"5cca765797ba13013cc4083f4bae2dc7d71e1214621805e75979a18ee058465a"} Jan 26 00:26:47 crc kubenswrapper[4975]: I0126 00:26:47.514044 4975 scope.go:117] "RemoveContainer" containerID="357c79f33f446bea74374c2f2cfe3c3e9a5ae14b815b3d77b4a4e238c3d358b4" Jan 26 00:26:47 crc kubenswrapper[4975]: I0126 00:26:47.515727 4975 scope.go:117] "RemoveContainer" containerID="5cca765797ba13013cc4083f4bae2dc7d71e1214621805e75979a18ee058465a" Jan 26 00:26:47 crc kubenswrapper[4975]: E0126 00:26:47.516307 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq_service-telemetry(51e86811-c442-451e-b8b3-1680edf110e4)\"" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq" podUID="51e86811-c442-451e-b8b3-1680edf110e4" Jan 26 00:26:47 crc kubenswrapper[4975]: I0126 00:26:47.520949 4975 generic.go:334] "Generic (PLEG): container finished" podID="5189d2e4-ce9c-4cb8-955a-22b1edde1b70" containerID="8425d3a1916c0d0a36700d5bcb6a49430d15871461a43adc5f3fa4618e5b9e6f" exitCode=0 Jan 26 00:26:47 crc kubenswrapper[4975]: I0126 00:26:47.521021 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" event={"ID":"5189d2e4-ce9c-4cb8-955a-22b1edde1b70","Type":"ContainerDied","Data":"8425d3a1916c0d0a36700d5bcb6a49430d15871461a43adc5f3fa4618e5b9e6f"} Jan 26 00:26:47 crc kubenswrapper[4975]: I0126 00:26:47.521664 4975 scope.go:117] "RemoveContainer" containerID="8425d3a1916c0d0a36700d5bcb6a49430d15871461a43adc5f3fa4618e5b9e6f" Jan 26 00:26:47 crc kubenswrapper[4975]: E0126 00:26:47.521879 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s_service-telemetry(5189d2e4-ce9c-4cb8-955a-22b1edde1b70)\"" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" podUID="5189d2e4-ce9c-4cb8-955a-22b1edde1b70" Jan 26 00:26:47 crc kubenswrapper[4975]: I0126 00:26:47.527022 4975 generic.go:334] "Generic (PLEG): container finished" podID="0d7d25f6-4c23-4290-a03c-73691e6237a4" containerID="c04993815f317f0e9ac125ea92854430de99f1e87c13c3309d33973e439be42b" exitCode=0 Jan 26 00:26:47 crc 
kubenswrapper[4975]: I0126 00:26:47.527087 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" event={"ID":"0d7d25f6-4c23-4290-a03c-73691e6237a4","Type":"ContainerDied","Data":"c04993815f317f0e9ac125ea92854430de99f1e87c13c3309d33973e439be42b"} Jan 26 00:26:47 crc kubenswrapper[4975]: I0126 00:26:47.527678 4975 scope.go:117] "RemoveContainer" containerID="c04993815f317f0e9ac125ea92854430de99f1e87c13c3309d33973e439be42b" Jan 26 00:26:47 crc kubenswrapper[4975]: E0126 00:26:47.527912 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j_service-telemetry(0d7d25f6-4c23-4290-a03c-73691e6237a4)\"" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" podUID="0d7d25f6-4c23-4290-a03c-73691e6237a4" Jan 26 00:26:47 crc kubenswrapper[4975]: I0126 00:26:47.530447 4975 generic.go:334] "Generic (PLEG): container finished" podID="90b625d1-1889-4c96-a8ea-1b5f60915c53" containerID="301715e1b4d64066eee1a581472d8f10d5ac80155f9e7f83a355f0d73365a86b" exitCode=0 Jan 26 00:26:47 crc kubenswrapper[4975]: I0126 00:26:47.530507 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7" event={"ID":"90b625d1-1889-4c96-a8ea-1b5f60915c53","Type":"ContainerDied","Data":"301715e1b4d64066eee1a581472d8f10d5ac80155f9e7f83a355f0d73365a86b"} Jan 26 00:26:47 crc kubenswrapper[4975]: I0126 00:26:47.530838 4975 scope.go:117] "RemoveContainer" containerID="301715e1b4d64066eee1a581472d8f10d5ac80155f9e7f83a355f0d73365a86b" Jan 26 00:26:47 crc kubenswrapper[4975]: E0126 00:26:47.531004 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7_service-telemetry(90b625d1-1889-4c96-a8ea-1b5f60915c53)\"" pod="service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7" podUID="90b625d1-1889-4c96-a8ea-1b5f60915c53" Jan 26 00:26:47 crc kubenswrapper[4975]: I0126 00:26:47.558759 4975 scope.go:117] "RemoveContainer" containerID="646e14dcbf10b2688d6e44c11907d9a3c22634753ce3e68966d8be3fe7cec213" Jan 26 00:26:47 crc kubenswrapper[4975]: I0126 00:26:47.613513 4975 scope.go:117] "RemoveContainer" containerID="de2868bbaeab4c51e4628a644966919fbd0c4d1233ebd51018b9f47f0218b67d" Jan 26 00:26:47 crc kubenswrapper[4975]: I0126 00:26:47.654487 4975 scope.go:117] "RemoveContainer" containerID="8a402716c5fc389928e160660a86efead267641c09b7a422d548d1201bfdcdc1" Jan 26 00:26:48 crc kubenswrapper[4975]: I0126 00:26:48.548778 4975 scope.go:117] "RemoveContainer" containerID="8425d3a1916c0d0a36700d5bcb6a49430d15871461a43adc5f3fa4618e5b9e6f" Jan 26 00:26:48 crc kubenswrapper[4975]: E0126 00:26:48.549488 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s_service-telemetry(5189d2e4-ce9c-4cb8-955a-22b1edde1b70)\"" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" podUID="5189d2e4-ce9c-4cb8-955a-22b1edde1b70" Jan 26 00:26:48 crc kubenswrapper[4975]: I0126 00:26:48.551285 4975 scope.go:117] 
"RemoveContainer" containerID="c04993815f317f0e9ac125ea92854430de99f1e87c13c3309d33973e439be42b" Jan 26 00:26:48 crc kubenswrapper[4975]: E0126 00:26:48.551449 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j_service-telemetry(0d7d25f6-4c23-4290-a03c-73691e6237a4)\"" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" podUID="0d7d25f6-4c23-4290-a03c-73691e6237a4" Jan 26 00:26:48 crc kubenswrapper[4975]: I0126 00:26:48.555555 4975 scope.go:117] "RemoveContainer" containerID="301715e1b4d64066eee1a581472d8f10d5ac80155f9e7f83a355f0d73365a86b" Jan 26 00:26:48 crc kubenswrapper[4975]: E0126 00:26:48.555768 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7_service-telemetry(90b625d1-1889-4c96-a8ea-1b5f60915c53)\"" pod="service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7" podUID="90b625d1-1889-4c96-a8ea-1b5f60915c53" Jan 26 00:26:48 crc kubenswrapper[4975]: I0126 00:26:48.557527 4975 scope.go:117] "RemoveContainer" containerID="5cca765797ba13013cc4083f4bae2dc7d71e1214621805e75979a18ee058465a" Jan 26 00:26:48 crc kubenswrapper[4975]: E0126 00:26:48.557687 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"bridge\" with CrashLoopBackOff: \"back-off 10s restarting failed container=bridge pod=default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq_service-telemetry(51e86811-c442-451e-b8b3-1680edf110e4)\"" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq" podUID="51e86811-c442-451e-b8b3-1680edf110e4" Jan 26 00:26:53 crc kubenswrapper[4975]: I0126 00:26:53.595685 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"2ca91482-436e-48bf-8d3b-256ab164a837","Type":"ContainerStarted","Data":"937513f1d62357cfcc3543964ba50eaa4780fe600b6ec752bb8fb4f33faecfd9"} Jan 26 00:26:53 crc kubenswrapper[4975]: I0126 00:26:53.596243 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"2ca91482-436e-48bf-8d3b-256ab164a837","Type":"ContainerStarted","Data":"d7c70b2160a69003ccd59cef7feb992e6f0f5ff3911dbfcee2d54ff9cd67e89b"} Jan 26 00:26:53 crc kubenswrapper[4975]: I0126 00:26:53.630575 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/alertmanager-default-0" podStartSLOduration=49.885154875 podStartE2EDuration="1m22.630552842s" podCreationTimestamp="2026-01-26 00:25:31 +0000 UTC" firstStartedPulling="2026-01-26 00:26:20.431892727 +0000 UTC m=+1164.553098231" lastFinishedPulling="2026-01-26 00:26:53.177290704 +0000 UTC m=+1197.298496198" observedRunningTime="2026-01-26 00:26:53.625904729 +0000 UTC m=+1197.747110223" watchObservedRunningTime="2026-01-26 00:26:53.630552842 +0000 UTC m=+1197.751758336" Jan 26 00:26:59 crc kubenswrapper[4975]: I0126 00:26:59.148023 4975 scope.go:117] "RemoveContainer" containerID="cd80d7a01aa9e69559d627844423582d35c4c2ffd7dec60e5bc046ddfc977cb6" Jan 26 00:27:00 crc kubenswrapper[4975]: I0126 00:27:00.664906 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" 
event={"ID":"26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529","Type":"ContainerStarted","Data":"c05d33344607ef888d978a602cdb9b9b20ce0d8b4a05b8f127c8f3a460d4ffbf"} Jan 26 00:27:01 crc kubenswrapper[4975]: I0126 00:27:01.148317 4975 scope.go:117] "RemoveContainer" containerID="c04993815f317f0e9ac125ea92854430de99f1e87c13c3309d33973e439be42b" Jan 26 00:27:01 crc kubenswrapper[4975]: I0126 00:27:01.674334 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" event={"ID":"26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529","Type":"ContainerStarted","Data":"4bebcaf44896bdb71a6502b5e826d26a4270b10c1a3da2e64ba5cd3357d4fcfc"} Jan 26 00:27:01 crc kubenswrapper[4975]: I0126 00:27:01.695608 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg" podStartSLOduration=11.833659 podStartE2EDuration="58.695588336s" podCreationTimestamp="2026-01-26 00:26:03 +0000 UTC" firstStartedPulling="2026-01-26 00:26:13.627031272 +0000 UTC m=+1157.748236756" lastFinishedPulling="2026-01-26 00:27:00.488960598 +0000 UTC m=+1204.610166092" observedRunningTime="2026-01-26 00:27:01.694128414 +0000 UTC m=+1205.815333908" watchObservedRunningTime="2026-01-26 00:27:01.695588336 +0000 UTC m=+1205.816793830" Jan 26 00:27:02 crc kubenswrapper[4975]: I0126 00:27:02.147111 4975 scope.go:117] "RemoveContainer" containerID="301715e1b4d64066eee1a581472d8f10d5ac80155f9e7f83a355f0d73365a86b" Jan 26 00:27:02 crc kubenswrapper[4975]: I0126 00:27:02.692264 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j" event={"ID":"0d7d25f6-4c23-4290-a03c-73691e6237a4","Type":"ContainerStarted","Data":"2739d61849ca4dc589a940ee80428b7127f26fa70b643af3fd9c1071e1798037"} Jan 26 00:27:03 crc kubenswrapper[4975]: I0126 00:27:03.147499 4975 scope.go:117] "RemoveContainer" containerID="8425d3a1916c0d0a36700d5bcb6a49430d15871461a43adc5f3fa4618e5b9e6f" Jan 26 00:27:03 crc kubenswrapper[4975]: I0126 00:27:03.700830 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7" event={"ID":"90b625d1-1889-4c96-a8ea-1b5f60915c53","Type":"ContainerStarted","Data":"4e2c4f159988b94ee147cac9dea48a62024faaa64bcb23f0adf20e7c46347173"} Jan 26 00:27:04 crc kubenswrapper[4975]: I0126 00:27:04.153023 4975 scope.go:117] "RemoveContainer" containerID="5cca765797ba13013cc4083f4bae2dc7d71e1214621805e75979a18ee058465a" Jan 26 00:27:04 crc kubenswrapper[4975]: I0126 00:27:04.710342 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s" event={"ID":"5189d2e4-ce9c-4cb8-955a-22b1edde1b70","Type":"ContainerStarted","Data":"abaa4c7547a0d194b5b1706cd6fa8afd46248ef47b78360d83b73ea8beb19128"} Jan 26 00:27:05 crc kubenswrapper[4975]: I0126 00:27:05.717824 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq" event={"ID":"51e86811-c442-451e-b8b3-1680edf110e4","Type":"ContainerStarted","Data":"d5f272f61ba251e7587fbd566ac35ab75c52eb451cef0698ea5f802142196e0e"} Jan 26 00:27:17 crc kubenswrapper[4975]: I0126 00:27:17.486852 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/qdr-test"] Jan 26 00:27:17 crc kubenswrapper[4975]: I0126 00:27:17.488523 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/qdr-test" Jan 26 00:27:17 crc kubenswrapper[4975]: I0126 00:27:17.494209 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"qdr-test-config" Jan 26 00:27:17 crc kubenswrapper[4975]: I0126 00:27:17.494624 4975 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-selfsigned" Jan 26 00:27:17 crc kubenswrapper[4975]: I0126 00:27:17.516360 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/qdr-test"] Jan 26 00:27:17 crc kubenswrapper[4975]: I0126 00:27:17.600355 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/30b7c47f-532b-4984-9858-0df6cfcf30d5-qdr-test-config\") pod \"qdr-test\" (UID: \"30b7c47f-532b-4984-9858-0df6cfcf30d5\") " pod="service-telemetry/qdr-test" Jan 26 00:27:17 crc kubenswrapper[4975]: I0126 00:27:17.600429 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nl8gd\" (UniqueName: \"kubernetes.io/projected/30b7c47f-532b-4984-9858-0df6cfcf30d5-kube-api-access-nl8gd\") pod \"qdr-test\" (UID: \"30b7c47f-532b-4984-9858-0df6cfcf30d5\") " pod="service-telemetry/qdr-test" Jan 26 00:27:17 crc kubenswrapper[4975]: I0126 00:27:17.600717 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/30b7c47f-532b-4984-9858-0df6cfcf30d5-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: \"30b7c47f-532b-4984-9858-0df6cfcf30d5\") " pod="service-telemetry/qdr-test" Jan 26 00:27:17 crc kubenswrapper[4975]: I0126 00:27:17.702947 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/30b7c47f-532b-4984-9858-0df6cfcf30d5-qdr-test-config\") pod \"qdr-test\" (UID: \"30b7c47f-532b-4984-9858-0df6cfcf30d5\") " pod="service-telemetry/qdr-test" Jan 26 00:27:17 crc kubenswrapper[4975]: I0126 00:27:17.703011 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nl8gd\" (UniqueName: \"kubernetes.io/projected/30b7c47f-532b-4984-9858-0df6cfcf30d5-kube-api-access-nl8gd\") pod \"qdr-test\" (UID: \"30b7c47f-532b-4984-9858-0df6cfcf30d5\") " pod="service-telemetry/qdr-test" Jan 26 00:27:17 crc kubenswrapper[4975]: I0126 00:27:17.703064 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/30b7c47f-532b-4984-9858-0df6cfcf30d5-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: \"30b7c47f-532b-4984-9858-0df6cfcf30d5\") " pod="service-telemetry/qdr-test" Jan 26 00:27:17 crc kubenswrapper[4975]: I0126 00:27:17.704642 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/30b7c47f-532b-4984-9858-0df6cfcf30d5-qdr-test-config\") pod \"qdr-test\" (UID: \"30b7c47f-532b-4984-9858-0df6cfcf30d5\") " pod="service-telemetry/qdr-test" Jan 26 00:27:17 crc kubenswrapper[4975]: I0126 00:27:17.711483 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/30b7c47f-532b-4984-9858-0df6cfcf30d5-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: 
\"30b7c47f-532b-4984-9858-0df6cfcf30d5\") " pod="service-telemetry/qdr-test" Jan 26 00:27:17 crc kubenswrapper[4975]: I0126 00:27:17.719827 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nl8gd\" (UniqueName: \"kubernetes.io/projected/30b7c47f-532b-4984-9858-0df6cfcf30d5-kube-api-access-nl8gd\") pod \"qdr-test\" (UID: \"30b7c47f-532b-4984-9858-0df6cfcf30d5\") " pod="service-telemetry/qdr-test" Jan 26 00:27:17 crc kubenswrapper[4975]: I0126 00:27:17.807534 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/qdr-test" Jan 26 00:27:18 crc kubenswrapper[4975]: I0126 00:27:18.272187 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/qdr-test"] Jan 26 00:27:18 crc kubenswrapper[4975]: I0126 00:27:18.832496 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/qdr-test" event={"ID":"30b7c47f-532b-4984-9858-0df6cfcf30d5","Type":"ContainerStarted","Data":"67abc96ffb2dc7684cb81111c696a909360bce61aa18e10d5175281d64b60968"} Jan 26 00:27:25 crc kubenswrapper[4975]: I0126 00:27:25.887941 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/qdr-test" event={"ID":"30b7c47f-532b-4984-9858-0df6cfcf30d5","Type":"ContainerStarted","Data":"c958d12200b29591f2ad4f1928ee4b681b28b980cc88f1e55954b6ddb09bbec2"} Jan 26 00:27:25 crc kubenswrapper[4975]: I0126 00:27:25.911155 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/qdr-test" podStartSLOduration=1.62900595 podStartE2EDuration="8.911132713s" podCreationTimestamp="2026-01-26 00:27:17 +0000 UTC" firstStartedPulling="2026-01-26 00:27:18.286697083 +0000 UTC m=+1222.407902577" lastFinishedPulling="2026-01-26 00:27:25.568823846 +0000 UTC m=+1229.690029340" observedRunningTime="2026-01-26 00:27:25.902246647 +0000 UTC m=+1230.023452131" watchObservedRunningTime="2026-01-26 00:27:25.911132713 +0000 UTC m=+1230.032338207" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.193267 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/stf-smoketest-smoke1-qzlg6"] Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.196299 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-qzlg6" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.199065 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-entrypoint-script" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.200097 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-healthcheck-log" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.200467 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-entrypoint-script" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.200504 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-publisher" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.200763 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-config" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.200602 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-sensubility-config" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.210239 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-qzlg6"] Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.242995 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-healthcheck-log\") pod \"stf-smoketest-smoke1-qzlg6\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " pod="service-telemetry/stf-smoketest-smoke1-qzlg6" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.243039 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-qzlg6\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " pod="service-telemetry/stf-smoketest-smoke1-qzlg6" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.243069 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-ceilometer-publisher\") pod \"stf-smoketest-smoke1-qzlg6\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " pod="service-telemetry/stf-smoketest-smoke1-qzlg6" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.243164 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-collectd-config\") pod \"stf-smoketest-smoke1-qzlg6\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " pod="service-telemetry/stf-smoketest-smoke1-qzlg6" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.243215 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzdtx\" (UniqueName: \"kubernetes.io/projected/2d414802-10de-46cd-9c60-35f10ea75cec-kube-api-access-fzdtx\") pod \"stf-smoketest-smoke1-qzlg6\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " pod="service-telemetry/stf-smoketest-smoke1-qzlg6" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 
00:27:26.243238 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-qzlg6\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " pod="service-telemetry/stf-smoketest-smoke1-qzlg6" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.243745 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-sensubility-config\") pod \"stf-smoketest-smoke1-qzlg6\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " pod="service-telemetry/stf-smoketest-smoke1-qzlg6" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.344997 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-healthcheck-log\") pod \"stf-smoketest-smoke1-qzlg6\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " pod="service-telemetry/stf-smoketest-smoke1-qzlg6" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.345057 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-qzlg6\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " pod="service-telemetry/stf-smoketest-smoke1-qzlg6" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.345157 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-ceilometer-publisher\") pod \"stf-smoketest-smoke1-qzlg6\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " pod="service-telemetry/stf-smoketest-smoke1-qzlg6" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.345213 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-collectd-config\") pod \"stf-smoketest-smoke1-qzlg6\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " pod="service-telemetry/stf-smoketest-smoke1-qzlg6" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.345245 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzdtx\" (UniqueName: \"kubernetes.io/projected/2d414802-10de-46cd-9c60-35f10ea75cec-kube-api-access-fzdtx\") pod \"stf-smoketest-smoke1-qzlg6\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " pod="service-telemetry/stf-smoketest-smoke1-qzlg6" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.345261 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-qzlg6\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " pod="service-telemetry/stf-smoketest-smoke1-qzlg6" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.345292 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-sensubility-config\") pod \"stf-smoketest-smoke1-qzlg6\" 
(UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " pod="service-telemetry/stf-smoketest-smoke1-qzlg6" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.346083 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-healthcheck-log\") pod \"stf-smoketest-smoke1-qzlg6\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " pod="service-telemetry/stf-smoketest-smoke1-qzlg6" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.346142 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-sensubility-config\") pod \"stf-smoketest-smoke1-qzlg6\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " pod="service-telemetry/stf-smoketest-smoke1-qzlg6" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.346668 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-ceilometer-publisher\") pod \"stf-smoketest-smoke1-qzlg6\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " pod="service-telemetry/stf-smoketest-smoke1-qzlg6" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.346684 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-qzlg6\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " pod="service-telemetry/stf-smoketest-smoke1-qzlg6" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.347073 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-qzlg6\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " pod="service-telemetry/stf-smoketest-smoke1-qzlg6" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.347622 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-collectd-config\") pod \"stf-smoketest-smoke1-qzlg6\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " pod="service-telemetry/stf-smoketest-smoke1-qzlg6" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.373080 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzdtx\" (UniqueName: \"kubernetes.io/projected/2d414802-10de-46cd-9c60-35f10ea75cec-kube-api-access-fzdtx\") pod \"stf-smoketest-smoke1-qzlg6\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " pod="service-telemetry/stf-smoketest-smoke1-qzlg6" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.517066 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-qzlg6" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.603637 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/curl"] Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.604677 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/curl" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.615894 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/curl"] Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.652090 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9khw\" (UniqueName: \"kubernetes.io/projected/2b0c5893-e83d-4491-909b-1e2e122e57a9-kube-api-access-n9khw\") pod \"curl\" (UID: \"2b0c5893-e83d-4491-909b-1e2e122e57a9\") " pod="service-telemetry/curl" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.754615 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9khw\" (UniqueName: \"kubernetes.io/projected/2b0c5893-e83d-4491-909b-1e2e122e57a9-kube-api-access-n9khw\") pod \"curl\" (UID: \"2b0c5893-e83d-4491-909b-1e2e122e57a9\") " pod="service-telemetry/curl" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.805619 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9khw\" (UniqueName: \"kubernetes.io/projected/2b0c5893-e83d-4491-909b-1e2e122e57a9-kube-api-access-n9khw\") pod \"curl\" (UID: \"2b0c5893-e83d-4491-909b-1e2e122e57a9\") " pod="service-telemetry/curl" Jan 26 00:27:26 crc kubenswrapper[4975]: I0126 00:27:26.925326 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/curl" Jan 26 00:27:27 crc kubenswrapper[4975]: I0126 00:27:27.001101 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-qzlg6"] Jan 26 00:27:27 crc kubenswrapper[4975]: I0126 00:27:27.124826 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/curl"] Jan 26 00:27:27 crc kubenswrapper[4975]: W0126 00:27:27.132987 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2b0c5893_e83d_4491_909b_1e2e122e57a9.slice/crio-b9dad9c0e84e6961e2e5733250abf12101d237ad3b78300af3f4ef853afaa1ee WatchSource:0}: Error finding container b9dad9c0e84e6961e2e5733250abf12101d237ad3b78300af3f4ef853afaa1ee: Status 404 returned error can't find the container with id b9dad9c0e84e6961e2e5733250abf12101d237ad3b78300af3f4ef853afaa1ee Jan 26 00:27:27 crc kubenswrapper[4975]: I0126 00:27:27.918491 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-qzlg6" event={"ID":"2d414802-10de-46cd-9c60-35f10ea75cec","Type":"ContainerStarted","Data":"fe5f8da4f50373327a6e61f733991cf55e2270f75677fa0903a117dff6ec3b92"} Jan 26 00:27:27 crc kubenswrapper[4975]: I0126 00:27:27.920483 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"2b0c5893-e83d-4491-909b-1e2e122e57a9","Type":"ContainerStarted","Data":"b9dad9c0e84e6961e2e5733250abf12101d237ad3b78300af3f4ef853afaa1ee"} Jan 26 00:27:29 crc kubenswrapper[4975]: I0126 00:27:29.943422 4975 generic.go:334] "Generic (PLEG): container finished" podID="2b0c5893-e83d-4491-909b-1e2e122e57a9" containerID="a5f6dde375d47716c304d7be7fe02e77b042e858b5b07752f12946461211a3ac" exitCode=0 Jan 26 00:27:29 crc kubenswrapper[4975]: I0126 00:27:29.943529 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"2b0c5893-e83d-4491-909b-1e2e122e57a9","Type":"ContainerDied","Data":"a5f6dde375d47716c304d7be7fe02e77b042e858b5b07752f12946461211a3ac"} Jan 26 00:27:33 crc kubenswrapper[4975]: I0126 
00:27:33.150152 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/curl" Jan 26 00:27:33 crc kubenswrapper[4975]: I0126 00:27:33.244997 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n9khw\" (UniqueName: \"kubernetes.io/projected/2b0c5893-e83d-4491-909b-1e2e122e57a9-kube-api-access-n9khw\") pod \"2b0c5893-e83d-4491-909b-1e2e122e57a9\" (UID: \"2b0c5893-e83d-4491-909b-1e2e122e57a9\") " Jan 26 00:27:33 crc kubenswrapper[4975]: I0126 00:27:33.252229 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b0c5893-e83d-4491-909b-1e2e122e57a9-kube-api-access-n9khw" (OuterVolumeSpecName: "kube-api-access-n9khw") pod "2b0c5893-e83d-4491-909b-1e2e122e57a9" (UID: "2b0c5893-e83d-4491-909b-1e2e122e57a9"). InnerVolumeSpecName "kube-api-access-n9khw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:27:33 crc kubenswrapper[4975]: I0126 00:27:33.334249 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_curl_2b0c5893-e83d-4491-909b-1e2e122e57a9/curl/0.log" Jan 26 00:27:33 crc kubenswrapper[4975]: I0126 00:27:33.348110 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n9khw\" (UniqueName: \"kubernetes.io/projected/2b0c5893-e83d-4491-909b-1e2e122e57a9-kube-api-access-n9khw\") on node \"crc\" DevicePath \"\"" Jan 26 00:27:33 crc kubenswrapper[4975]: I0126 00:27:33.631625 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-78bcbbdcff-nt9q7_cc95813a-9cba-437a-bce6-98252790efed/prometheus-webhook-snmp/0.log" Jan 26 00:27:33 crc kubenswrapper[4975]: I0126 00:27:33.978454 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"2b0c5893-e83d-4491-909b-1e2e122e57a9","Type":"ContainerDied","Data":"b9dad9c0e84e6961e2e5733250abf12101d237ad3b78300af3f4ef853afaa1ee"} Jan 26 00:27:33 crc kubenswrapper[4975]: I0126 00:27:33.978792 4975 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b9dad9c0e84e6961e2e5733250abf12101d237ad3b78300af3f4ef853afaa1ee" Jan 26 00:27:33 crc kubenswrapper[4975]: I0126 00:27:33.978538 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/curl" Jan 26 00:27:38 crc kubenswrapper[4975]: I0126 00:27:38.011069 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-qzlg6" event={"ID":"2d414802-10de-46cd-9c60-35f10ea75cec","Type":"ContainerStarted","Data":"b7115c5c865671175e98732740623d1d925f0ca6a25ba9bcd684bee5e726e5d8"} Jan 26 00:27:46 crc kubenswrapper[4975]: I0126 00:27:46.086089 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-qzlg6" event={"ID":"2d414802-10de-46cd-9c60-35f10ea75cec","Type":"ContainerStarted","Data":"212c7c46ad78c537b4f1163b697c4a22bd00ff6b2259b74c3f2ae58c6a0d86a5"} Jan 26 00:28:03 crc kubenswrapper[4975]: I0126 00:28:03.766916 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-78bcbbdcff-nt9q7_cc95813a-9cba-437a-bce6-98252790efed/prometheus-webhook-snmp/0.log" Jan 26 00:28:12 crc kubenswrapper[4975]: I0126 00:28:12.427843 4975 generic.go:334] "Generic (PLEG): container finished" podID="2d414802-10de-46cd-9c60-35f10ea75cec" containerID="b7115c5c865671175e98732740623d1d925f0ca6a25ba9bcd684bee5e726e5d8" exitCode=1 Jan 26 00:28:12 crc kubenswrapper[4975]: I0126 00:28:12.427952 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-qzlg6" event={"ID":"2d414802-10de-46cd-9c60-35f10ea75cec","Type":"ContainerDied","Data":"b7115c5c865671175e98732740623d1d925f0ca6a25ba9bcd684bee5e726e5d8"} Jan 26 00:28:12 crc kubenswrapper[4975]: I0126 00:28:12.430253 4975 scope.go:117] "RemoveContainer" containerID="b7115c5c865671175e98732740623d1d925f0ca6a25ba9bcd684bee5e726e5d8" Jan 26 00:28:17 crc kubenswrapper[4975]: I0126 00:28:17.483682 4975 generic.go:334] "Generic (PLEG): container finished" podID="2d414802-10de-46cd-9c60-35f10ea75cec" containerID="212c7c46ad78c537b4f1163b697c4a22bd00ff6b2259b74c3f2ae58c6a0d86a5" exitCode=1 Jan 26 00:28:17 crc kubenswrapper[4975]: I0126 00:28:17.483793 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-qzlg6" event={"ID":"2d414802-10de-46cd-9c60-35f10ea75cec","Type":"ContainerDied","Data":"212c7c46ad78c537b4f1163b697c4a22bd00ff6b2259b74c3f2ae58c6a0d86a5"} Jan 26 00:28:18 crc kubenswrapper[4975]: I0126 00:28:18.830302 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-qzlg6" Jan 26 00:28:18 crc kubenswrapper[4975]: I0126 00:28:18.931039 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-collectd-config\") pod \"2d414802-10de-46cd-9c60-35f10ea75cec\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " Jan 26 00:28:18 crc kubenswrapper[4975]: I0126 00:28:18.931117 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzdtx\" (UniqueName: \"kubernetes.io/projected/2d414802-10de-46cd-9c60-35f10ea75cec-kube-api-access-fzdtx\") pod \"2d414802-10de-46cd-9c60-35f10ea75cec\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " Jan 26 00:28:18 crc kubenswrapper[4975]: I0126 00:28:18.931161 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-collectd-entrypoint-script\") pod \"2d414802-10de-46cd-9c60-35f10ea75cec\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " Jan 26 00:28:18 crc kubenswrapper[4975]: I0126 00:28:18.931193 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-ceilometer-publisher\") pod \"2d414802-10de-46cd-9c60-35f10ea75cec\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " Jan 26 00:28:18 crc kubenswrapper[4975]: I0126 00:28:18.931272 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-sensubility-config\") pod \"2d414802-10de-46cd-9c60-35f10ea75cec\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " Jan 26 00:28:18 crc kubenswrapper[4975]: I0126 00:28:18.931311 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-healthcheck-log\") pod \"2d414802-10de-46cd-9c60-35f10ea75cec\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " Jan 26 00:28:18 crc kubenswrapper[4975]: I0126 00:28:18.931360 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-ceilometer-entrypoint-script\") pod \"2d414802-10de-46cd-9c60-35f10ea75cec\" (UID: \"2d414802-10de-46cd-9c60-35f10ea75cec\") " Jan 26 00:28:18 crc kubenswrapper[4975]: I0126 00:28:18.948769 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d414802-10de-46cd-9c60-35f10ea75cec-kube-api-access-fzdtx" (OuterVolumeSpecName: "kube-api-access-fzdtx") pod "2d414802-10de-46cd-9c60-35f10ea75cec" (UID: "2d414802-10de-46cd-9c60-35f10ea75cec"). InnerVolumeSpecName "kube-api-access-fzdtx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:28:18 crc kubenswrapper[4975]: I0126 00:28:18.951144 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-collectd-config" (OuterVolumeSpecName: "collectd-config") pod "2d414802-10de-46cd-9c60-35f10ea75cec" (UID: "2d414802-10de-46cd-9c60-35f10ea75cec"). InnerVolumeSpecName "collectd-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:28:18 crc kubenswrapper[4975]: I0126 00:28:18.959135 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-ceilometer-publisher" (OuterVolumeSpecName: "ceilometer-publisher") pod "2d414802-10de-46cd-9c60-35f10ea75cec" (UID: "2d414802-10de-46cd-9c60-35f10ea75cec"). InnerVolumeSpecName "ceilometer-publisher". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:28:18 crc kubenswrapper[4975]: I0126 00:28:18.959166 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-healthcheck-log" (OuterVolumeSpecName: "healthcheck-log") pod "2d414802-10de-46cd-9c60-35f10ea75cec" (UID: "2d414802-10de-46cd-9c60-35f10ea75cec"). InnerVolumeSpecName "healthcheck-log". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:28:18 crc kubenswrapper[4975]: I0126 00:28:18.959942 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-sensubility-config" (OuterVolumeSpecName: "sensubility-config") pod "2d414802-10de-46cd-9c60-35f10ea75cec" (UID: "2d414802-10de-46cd-9c60-35f10ea75cec"). InnerVolumeSpecName "sensubility-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:28:18 crc kubenswrapper[4975]: I0126 00:28:18.963550 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-ceilometer-entrypoint-script" (OuterVolumeSpecName: "ceilometer-entrypoint-script") pod "2d414802-10de-46cd-9c60-35f10ea75cec" (UID: "2d414802-10de-46cd-9c60-35f10ea75cec"). InnerVolumeSpecName "ceilometer-entrypoint-script". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:28:18 crc kubenswrapper[4975]: I0126 00:28:18.965261 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-collectd-entrypoint-script" (OuterVolumeSpecName: "collectd-entrypoint-script") pod "2d414802-10de-46cd-9c60-35f10ea75cec" (UID: "2d414802-10de-46cd-9c60-35f10ea75cec"). InnerVolumeSpecName "collectd-entrypoint-script". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:28:19 crc kubenswrapper[4975]: I0126 00:28:19.033388 4975 reconciler_common.go:293] "Volume detached for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-sensubility-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:28:19 crc kubenswrapper[4975]: I0126 00:28:19.033438 4975 reconciler_common.go:293] "Volume detached for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-healthcheck-log\") on node \"crc\" DevicePath \"\"" Jan 26 00:28:19 crc kubenswrapper[4975]: I0126 00:28:19.033448 4975 reconciler_common.go:293] "Volume detached for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-ceilometer-entrypoint-script\") on node \"crc\" DevicePath \"\"" Jan 26 00:28:19 crc kubenswrapper[4975]: I0126 00:28:19.033461 4975 reconciler_common.go:293] "Volume detached for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-collectd-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:28:19 crc kubenswrapper[4975]: I0126 00:28:19.033473 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzdtx\" (UniqueName: \"kubernetes.io/projected/2d414802-10de-46cd-9c60-35f10ea75cec-kube-api-access-fzdtx\") on node \"crc\" DevicePath \"\"" Jan 26 00:28:19 crc kubenswrapper[4975]: I0126 00:28:19.033484 4975 reconciler_common.go:293] "Volume detached for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-collectd-entrypoint-script\") on node \"crc\" DevicePath \"\"" Jan 26 00:28:19 crc kubenswrapper[4975]: I0126 00:28:19.033494 4975 reconciler_common.go:293] "Volume detached for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/2d414802-10de-46cd-9c60-35f10ea75cec-ceilometer-publisher\") on node \"crc\" DevicePath \"\"" Jan 26 00:28:19 crc kubenswrapper[4975]: I0126 00:28:19.514871 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-qzlg6" event={"ID":"2d414802-10de-46cd-9c60-35f10ea75cec","Type":"ContainerDied","Data":"fe5f8da4f50373327a6e61f733991cf55e2270f75677fa0903a117dff6ec3b92"} Jan 26 00:28:19 crc kubenswrapper[4975]: I0126 00:28:19.514927 4975 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe5f8da4f50373327a6e61f733991cf55e2270f75677fa0903a117dff6ec3b92" Jan 26 00:28:19 crc kubenswrapper[4975]: I0126 00:28:19.515233 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-qzlg6" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.026442 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/stf-smoketest-smoke1-dwrsq"] Jan 26 00:28:26 crc kubenswrapper[4975]: E0126 00:28:26.029340 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b0c5893-e83d-4491-909b-1e2e122e57a9" containerName="curl" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.029365 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b0c5893-e83d-4491-909b-1e2e122e57a9" containerName="curl" Jan 26 00:28:26 crc kubenswrapper[4975]: E0126 00:28:26.029386 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d414802-10de-46cd-9c60-35f10ea75cec" containerName="smoketest-ceilometer" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.029394 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d414802-10de-46cd-9c60-35f10ea75cec" containerName="smoketest-ceilometer" Jan 26 00:28:26 crc kubenswrapper[4975]: E0126 00:28:26.029408 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d414802-10de-46cd-9c60-35f10ea75cec" containerName="smoketest-collectd" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.029414 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d414802-10de-46cd-9c60-35f10ea75cec" containerName="smoketest-collectd" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.029538 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d414802-10de-46cd-9c60-35f10ea75cec" containerName="smoketest-ceilometer" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.029555 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d414802-10de-46cd-9c60-35f10ea75cec" containerName="smoketest-collectd" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.029568 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b0c5893-e83d-4491-909b-1e2e122e57a9" containerName="curl" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.030377 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-dwrsq" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.032544 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-config" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.032984 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-healthcheck-log" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.034986 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-publisher" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.035506 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-entrypoint-script" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.035660 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-entrypoint-script" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.035692 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-sensubility-config" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.047166 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-dwrsq"] Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.152955 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-healthcheck-log\") pod \"stf-smoketest-smoke1-dwrsq\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " pod="service-telemetry/stf-smoketest-smoke1-dwrsq" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.153004 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-collectd-config\") pod \"stf-smoketest-smoke1-dwrsq\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " pod="service-telemetry/stf-smoketest-smoke1-dwrsq" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.153037 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-ceilometer-publisher\") pod \"stf-smoketest-smoke1-dwrsq\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " pod="service-telemetry/stf-smoketest-smoke1-dwrsq" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.153063 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5fl2\" (UniqueName: \"kubernetes.io/projected/c3e33d23-1e37-41be-9af2-e12fca999dbe-kube-api-access-s5fl2\") pod \"stf-smoketest-smoke1-dwrsq\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " pod="service-telemetry/stf-smoketest-smoke1-dwrsq" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.153097 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-sensubility-config\") pod \"stf-smoketest-smoke1-dwrsq\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " pod="service-telemetry/stf-smoketest-smoke1-dwrsq" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.153197 4975 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-dwrsq\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " pod="service-telemetry/stf-smoketest-smoke1-dwrsq" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.153385 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-dwrsq\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " pod="service-telemetry/stf-smoketest-smoke1-dwrsq" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.254795 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-dwrsq\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " pod="service-telemetry/stf-smoketest-smoke1-dwrsq" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.254881 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-healthcheck-log\") pod \"stf-smoketest-smoke1-dwrsq\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " pod="service-telemetry/stf-smoketest-smoke1-dwrsq" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.254903 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-collectd-config\") pod \"stf-smoketest-smoke1-dwrsq\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " pod="service-telemetry/stf-smoketest-smoke1-dwrsq" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.254933 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-ceilometer-publisher\") pod \"stf-smoketest-smoke1-dwrsq\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " pod="service-telemetry/stf-smoketest-smoke1-dwrsq" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.254956 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5fl2\" (UniqueName: \"kubernetes.io/projected/c3e33d23-1e37-41be-9af2-e12fca999dbe-kube-api-access-s5fl2\") pod \"stf-smoketest-smoke1-dwrsq\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " pod="service-telemetry/stf-smoketest-smoke1-dwrsq" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.254999 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-sensubility-config\") pod \"stf-smoketest-smoke1-dwrsq\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " pod="service-telemetry/stf-smoketest-smoke1-dwrsq" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.255040 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-dwrsq\" (UID: 
\"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " pod="service-telemetry/stf-smoketest-smoke1-dwrsq" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.256075 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-healthcheck-log\") pod \"stf-smoketest-smoke1-dwrsq\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " pod="service-telemetry/stf-smoketest-smoke1-dwrsq" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.256546 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-dwrsq\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " pod="service-telemetry/stf-smoketest-smoke1-dwrsq" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.256612 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-sensubility-config\") pod \"stf-smoketest-smoke1-dwrsq\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " pod="service-telemetry/stf-smoketest-smoke1-dwrsq" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.256775 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-ceilometer-publisher\") pod \"stf-smoketest-smoke1-dwrsq\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " pod="service-telemetry/stf-smoketest-smoke1-dwrsq" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.257138 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-collectd-config\") pod \"stf-smoketest-smoke1-dwrsq\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " pod="service-telemetry/stf-smoketest-smoke1-dwrsq" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.257570 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-dwrsq\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " pod="service-telemetry/stf-smoketest-smoke1-dwrsq" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.294999 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5fl2\" (UniqueName: \"kubernetes.io/projected/c3e33d23-1e37-41be-9af2-e12fca999dbe-kube-api-access-s5fl2\") pod \"stf-smoketest-smoke1-dwrsq\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " pod="service-telemetry/stf-smoketest-smoke1-dwrsq" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.367604 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-dwrsq" Jan 26 00:28:26 crc kubenswrapper[4975]: I0126 00:28:26.581509 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-dwrsq"] Jan 26 00:28:27 crc kubenswrapper[4975]: I0126 00:28:27.581724 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-dwrsq" event={"ID":"c3e33d23-1e37-41be-9af2-e12fca999dbe","Type":"ContainerStarted","Data":"adeb9c790c028a7480e0b78cdc5d489bf7d3e365d4f414a6dda0d89d9f23cf02"} Jan 26 00:28:27 crc kubenswrapper[4975]: I0126 00:28:27.581798 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-dwrsq" event={"ID":"c3e33d23-1e37-41be-9af2-e12fca999dbe","Type":"ContainerStarted","Data":"ddb94d0a64a592dae1b4a944b42a92c321df45bd346d3ed40831708df04fa1cb"} Jan 26 00:28:27 crc kubenswrapper[4975]: I0126 00:28:27.581816 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-dwrsq" event={"ID":"c3e33d23-1e37-41be-9af2-e12fca999dbe","Type":"ContainerStarted","Data":"35a0812886d38ed3318e8ff1b6b3cd4e71954818b1cc082f806c4a805d1e591e"} Jan 26 00:28:27 crc kubenswrapper[4975]: I0126 00:28:27.601820 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/stf-smoketest-smoke1-dwrsq" podStartSLOduration=1.601793068 podStartE2EDuration="1.601793068s" podCreationTimestamp="2026-01-26 00:28:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:28:27.598459874 +0000 UTC m=+1291.719665368" watchObservedRunningTime="2026-01-26 00:28:27.601793068 +0000 UTC m=+1291.722998562" Jan 26 00:28:40 crc kubenswrapper[4975]: I0126 00:28:40.481320 4975 patch_prober.go:28] interesting pod/machine-config-daemon-f42fk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 00:28:40 crc kubenswrapper[4975]: I0126 00:28:40.482387 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 00:28:59 crc kubenswrapper[4975]: I0126 00:28:59.902189 4975 generic.go:334] "Generic (PLEG): container finished" podID="c3e33d23-1e37-41be-9af2-e12fca999dbe" containerID="adeb9c790c028a7480e0b78cdc5d489bf7d3e365d4f414a6dda0d89d9f23cf02" exitCode=1 Jan 26 00:28:59 crc kubenswrapper[4975]: I0126 00:28:59.902267 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-dwrsq" event={"ID":"c3e33d23-1e37-41be-9af2-e12fca999dbe","Type":"ContainerDied","Data":"adeb9c790c028a7480e0b78cdc5d489bf7d3e365d4f414a6dda0d89d9f23cf02"} Jan 26 00:28:59 crc kubenswrapper[4975]: I0126 00:28:59.904032 4975 scope.go:117] "RemoveContainer" containerID="adeb9c790c028a7480e0b78cdc5d489bf7d3e365d4f414a6dda0d89d9f23cf02" Jan 26 00:29:00 crc kubenswrapper[4975]: I0126 00:29:00.914293 4975 generic.go:334] "Generic (PLEG): container finished" podID="c3e33d23-1e37-41be-9af2-e12fca999dbe" containerID="ddb94d0a64a592dae1b4a944b42a92c321df45bd346d3ed40831708df04fa1cb" exitCode=1 Jan 26 
00:29:00 crc kubenswrapper[4975]: I0126 00:29:00.914390 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-dwrsq" event={"ID":"c3e33d23-1e37-41be-9af2-e12fca999dbe","Type":"ContainerDied","Data":"ddb94d0a64a592dae1b4a944b42a92c321df45bd346d3ed40831708df04fa1cb"} Jan 26 00:29:02 crc kubenswrapper[4975]: I0126 00:29:02.195931 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-dwrsq" Jan 26 00:29:02 crc kubenswrapper[4975]: I0126 00:29:02.396245 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5fl2\" (UniqueName: \"kubernetes.io/projected/c3e33d23-1e37-41be-9af2-e12fca999dbe-kube-api-access-s5fl2\") pod \"c3e33d23-1e37-41be-9af2-e12fca999dbe\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " Jan 26 00:29:02 crc kubenswrapper[4975]: I0126 00:29:02.396355 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-collectd-config\") pod \"c3e33d23-1e37-41be-9af2-e12fca999dbe\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " Jan 26 00:29:02 crc kubenswrapper[4975]: I0126 00:29:02.396405 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-ceilometer-entrypoint-script\") pod \"c3e33d23-1e37-41be-9af2-e12fca999dbe\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " Jan 26 00:29:02 crc kubenswrapper[4975]: I0126 00:29:02.396429 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-healthcheck-log\") pod \"c3e33d23-1e37-41be-9af2-e12fca999dbe\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " Jan 26 00:29:02 crc kubenswrapper[4975]: I0126 00:29:02.396519 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-sensubility-config\") pod \"c3e33d23-1e37-41be-9af2-e12fca999dbe\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " Jan 26 00:29:02 crc kubenswrapper[4975]: I0126 00:29:02.396555 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-collectd-entrypoint-script\") pod \"c3e33d23-1e37-41be-9af2-e12fca999dbe\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " Jan 26 00:29:02 crc kubenswrapper[4975]: I0126 00:29:02.396575 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-ceilometer-publisher\") pod \"c3e33d23-1e37-41be-9af2-e12fca999dbe\" (UID: \"c3e33d23-1e37-41be-9af2-e12fca999dbe\") " Jan 26 00:29:02 crc kubenswrapper[4975]: I0126 00:29:02.409080 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3e33d23-1e37-41be-9af2-e12fca999dbe-kube-api-access-s5fl2" (OuterVolumeSpecName: "kube-api-access-s5fl2") pod "c3e33d23-1e37-41be-9af2-e12fca999dbe" (UID: "c3e33d23-1e37-41be-9af2-e12fca999dbe"). InnerVolumeSpecName "kube-api-access-s5fl2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:29:02 crc kubenswrapper[4975]: I0126 00:29:02.422034 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-collectd-entrypoint-script" (OuterVolumeSpecName: "collectd-entrypoint-script") pod "c3e33d23-1e37-41be-9af2-e12fca999dbe" (UID: "c3e33d23-1e37-41be-9af2-e12fca999dbe"). InnerVolumeSpecName "collectd-entrypoint-script". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:29:02 crc kubenswrapper[4975]: I0126 00:29:02.422085 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-ceilometer-entrypoint-script" (OuterVolumeSpecName: "ceilometer-entrypoint-script") pod "c3e33d23-1e37-41be-9af2-e12fca999dbe" (UID: "c3e33d23-1e37-41be-9af2-e12fca999dbe"). InnerVolumeSpecName "ceilometer-entrypoint-script". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:29:02 crc kubenswrapper[4975]: I0126 00:29:02.422546 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-healthcheck-log" (OuterVolumeSpecName: "healthcheck-log") pod "c3e33d23-1e37-41be-9af2-e12fca999dbe" (UID: "c3e33d23-1e37-41be-9af2-e12fca999dbe"). InnerVolumeSpecName "healthcheck-log". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:29:02 crc kubenswrapper[4975]: I0126 00:29:02.423973 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-collectd-config" (OuterVolumeSpecName: "collectd-config") pod "c3e33d23-1e37-41be-9af2-e12fca999dbe" (UID: "c3e33d23-1e37-41be-9af2-e12fca999dbe"). InnerVolumeSpecName "collectd-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:29:02 crc kubenswrapper[4975]: I0126 00:29:02.425592 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-sensubility-config" (OuterVolumeSpecName: "sensubility-config") pod "c3e33d23-1e37-41be-9af2-e12fca999dbe" (UID: "c3e33d23-1e37-41be-9af2-e12fca999dbe"). InnerVolumeSpecName "sensubility-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:29:02 crc kubenswrapper[4975]: I0126 00:29:02.433177 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-ceilometer-publisher" (OuterVolumeSpecName: "ceilometer-publisher") pod "c3e33d23-1e37-41be-9af2-e12fca999dbe" (UID: "c3e33d23-1e37-41be-9af2-e12fca999dbe"). InnerVolumeSpecName "ceilometer-publisher". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:29:02 crc kubenswrapper[4975]: I0126 00:29:02.498867 4975 reconciler_common.go:293] "Volume detached for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-sensubility-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:29:02 crc kubenswrapper[4975]: I0126 00:29:02.498911 4975 reconciler_common.go:293] "Volume detached for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-collectd-entrypoint-script\") on node \"crc\" DevicePath \"\"" Jan 26 00:29:02 crc kubenswrapper[4975]: I0126 00:29:02.498925 4975 reconciler_common.go:293] "Volume detached for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-ceilometer-publisher\") on node \"crc\" DevicePath \"\"" Jan 26 00:29:02 crc kubenswrapper[4975]: I0126 00:29:02.498935 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5fl2\" (UniqueName: \"kubernetes.io/projected/c3e33d23-1e37-41be-9af2-e12fca999dbe-kube-api-access-s5fl2\") on node \"crc\" DevicePath \"\"" Jan 26 00:29:02 crc kubenswrapper[4975]: I0126 00:29:02.498944 4975 reconciler_common.go:293] "Volume detached for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-collectd-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:29:02 crc kubenswrapper[4975]: I0126 00:29:02.498952 4975 reconciler_common.go:293] "Volume detached for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-ceilometer-entrypoint-script\") on node \"crc\" DevicePath \"\"" Jan 26 00:29:02 crc kubenswrapper[4975]: I0126 00:29:02.498961 4975 reconciler_common.go:293] "Volume detached for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/c3e33d23-1e37-41be-9af2-e12fca999dbe-healthcheck-log\") on node \"crc\" DevicePath \"\"" Jan 26 00:29:02 crc kubenswrapper[4975]: I0126 00:29:02.937059 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-dwrsq" event={"ID":"c3e33d23-1e37-41be-9af2-e12fca999dbe","Type":"ContainerDied","Data":"35a0812886d38ed3318e8ff1b6b3cd4e71954818b1cc082f806c4a805d1e591e"} Jan 26 00:29:02 crc kubenswrapper[4975]: I0126 00:29:02.937140 4975 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="35a0812886d38ed3318e8ff1b6b3cd4e71954818b1cc082f806c4a805d1e591e" Jan 26 00:29:02 crc kubenswrapper[4975]: I0126 00:29:02.937142 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-dwrsq" Jan 26 00:29:10 crc kubenswrapper[4975]: I0126 00:29:10.481679 4975 patch_prober.go:28] interesting pod/machine-config-daemon-f42fk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 00:29:10 crc kubenswrapper[4975]: I0126 00:29:10.482312 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.039385 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/stf-smoketest-smoke1-tmxg4"] Jan 26 00:29:20 crc kubenswrapper[4975]: E0126 00:29:20.040927 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3e33d23-1e37-41be-9af2-e12fca999dbe" containerName="smoketest-ceilometer" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.040955 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3e33d23-1e37-41be-9af2-e12fca999dbe" containerName="smoketest-ceilometer" Jan 26 00:29:20 crc kubenswrapper[4975]: E0126 00:29:20.040991 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3e33d23-1e37-41be-9af2-e12fca999dbe" containerName="smoketest-collectd" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.041004 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3e33d23-1e37-41be-9af2-e12fca999dbe" containerName="smoketest-collectd" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.041255 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3e33d23-1e37-41be-9af2-e12fca999dbe" containerName="smoketest-ceilometer" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.041291 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3e33d23-1e37-41be-9af2-e12fca999dbe" containerName="smoketest-collectd" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.043002 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-tmxg4" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.046311 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-healthcheck-log" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.046413 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-config" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.046547 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-sensubility-config" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.052589 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-entrypoint-script" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.052867 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-entrypoint-script" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.055020 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-tmxg4"] Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.055854 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-publisher" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.108802 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-tmxg4\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " pod="service-telemetry/stf-smoketest-smoke1-tmxg4" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.108866 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-sensubility-config\") pod \"stf-smoketest-smoke1-tmxg4\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " pod="service-telemetry/stf-smoketest-smoke1-tmxg4" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.108961 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-ceilometer-publisher\") pod \"stf-smoketest-smoke1-tmxg4\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " pod="service-telemetry/stf-smoketest-smoke1-tmxg4" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.109021 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4glcv\" (UniqueName: \"kubernetes.io/projected/3a954d9d-e67b-4d74-ba84-047628ceee6e-kube-api-access-4glcv\") pod \"stf-smoketest-smoke1-tmxg4\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " pod="service-telemetry/stf-smoketest-smoke1-tmxg4" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.109093 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-tmxg4\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " pod="service-telemetry/stf-smoketest-smoke1-tmxg4" Jan 26 00:29:20 crc 
kubenswrapper[4975]: I0126 00:29:20.109138 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-collectd-config\") pod \"stf-smoketest-smoke1-tmxg4\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " pod="service-telemetry/stf-smoketest-smoke1-tmxg4" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.109171 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-healthcheck-log\") pod \"stf-smoketest-smoke1-tmxg4\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " pod="service-telemetry/stf-smoketest-smoke1-tmxg4" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.210632 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-healthcheck-log\") pod \"stf-smoketest-smoke1-tmxg4\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " pod="service-telemetry/stf-smoketest-smoke1-tmxg4" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.210755 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-tmxg4\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " pod="service-telemetry/stf-smoketest-smoke1-tmxg4" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.210801 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-sensubility-config\") pod \"stf-smoketest-smoke1-tmxg4\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " pod="service-telemetry/stf-smoketest-smoke1-tmxg4" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.210833 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-ceilometer-publisher\") pod \"stf-smoketest-smoke1-tmxg4\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " pod="service-telemetry/stf-smoketest-smoke1-tmxg4" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.210868 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4glcv\" (UniqueName: \"kubernetes.io/projected/3a954d9d-e67b-4d74-ba84-047628ceee6e-kube-api-access-4glcv\") pod \"stf-smoketest-smoke1-tmxg4\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " pod="service-telemetry/stf-smoketest-smoke1-tmxg4" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.211099 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-tmxg4\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " pod="service-telemetry/stf-smoketest-smoke1-tmxg4" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.211121 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-collectd-config\") pod \"stf-smoketest-smoke1-tmxg4\" (UID: 
\"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " pod="service-telemetry/stf-smoketest-smoke1-tmxg4" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.211987 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-healthcheck-log\") pod \"stf-smoketest-smoke1-tmxg4\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " pod="service-telemetry/stf-smoketest-smoke1-tmxg4" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.211995 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-tmxg4\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " pod="service-telemetry/stf-smoketest-smoke1-tmxg4" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.212230 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-sensubility-config\") pod \"stf-smoketest-smoke1-tmxg4\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " pod="service-telemetry/stf-smoketest-smoke1-tmxg4" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.212230 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-ceilometer-publisher\") pod \"stf-smoketest-smoke1-tmxg4\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " pod="service-telemetry/stf-smoketest-smoke1-tmxg4" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.212336 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-tmxg4\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " pod="service-telemetry/stf-smoketest-smoke1-tmxg4" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.212980 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-collectd-config\") pod \"stf-smoketest-smoke1-tmxg4\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " pod="service-telemetry/stf-smoketest-smoke1-tmxg4" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.234684 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4glcv\" (UniqueName: \"kubernetes.io/projected/3a954d9d-e67b-4d74-ba84-047628ceee6e-kube-api-access-4glcv\") pod \"stf-smoketest-smoke1-tmxg4\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " pod="service-telemetry/stf-smoketest-smoke1-tmxg4" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.368086 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-tmxg4" Jan 26 00:29:20 crc kubenswrapper[4975]: I0126 00:29:20.650412 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-tmxg4"] Jan 26 00:29:21 crc kubenswrapper[4975]: I0126 00:29:21.090443 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-tmxg4" event={"ID":"3a954d9d-e67b-4d74-ba84-047628ceee6e","Type":"ContainerStarted","Data":"af28a4d03fb97a7da58cbd47edf1d7ce2315bb8224372ececd69ecc9b9d66b00"} Jan 26 00:29:21 crc kubenswrapper[4975]: I0126 00:29:21.091122 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-tmxg4" event={"ID":"3a954d9d-e67b-4d74-ba84-047628ceee6e","Type":"ContainerStarted","Data":"3e5bbfd1a7c0793fffcc464425a6fbf3e7d29f5b2d7cc0e2b0db16d95119bf68"} Jan 26 00:29:21 crc kubenswrapper[4975]: I0126 00:29:21.091146 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-tmxg4" event={"ID":"3a954d9d-e67b-4d74-ba84-047628ceee6e","Type":"ContainerStarted","Data":"475aa01c7aa0a15253eb853ced7c0322d2264f99d17243f5aa99d293304eb2ca"} Jan 26 00:29:21 crc kubenswrapper[4975]: I0126 00:29:21.128389 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/stf-smoketest-smoke1-tmxg4" podStartSLOduration=1.128361418 podStartE2EDuration="1.128361418s" podCreationTimestamp="2026-01-26 00:29:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-26 00:29:21.117930882 +0000 UTC m=+1345.239136376" watchObservedRunningTime="2026-01-26 00:29:21.128361418 +0000 UTC m=+1345.249566912" Jan 26 00:29:40 crc kubenswrapper[4975]: I0126 00:29:40.481397 4975 patch_prober.go:28] interesting pod/machine-config-daemon-f42fk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 00:29:40 crc kubenswrapper[4975]: I0126 00:29:40.482227 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 00:29:40 crc kubenswrapper[4975]: I0126 00:29:40.482302 4975 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" Jan 26 00:29:40 crc kubenswrapper[4975]: I0126 00:29:40.483363 4975 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"950074c352f727e22ecb9292041c52e5cee133377c9a49581baee4c6166e42f5"} pod="openshift-machine-config-operator/machine-config-daemon-f42fk" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 26 00:29:40 crc kubenswrapper[4975]: I0126 00:29:40.483471 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" containerID="cri-o://950074c352f727e22ecb9292041c52e5cee133377c9a49581baee4c6166e42f5" 
gracePeriod=600 Jan 26 00:29:41 crc kubenswrapper[4975]: I0126 00:29:41.624598 4975 generic.go:334] "Generic (PLEG): container finished" podID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerID="950074c352f727e22ecb9292041c52e5cee133377c9a49581baee4c6166e42f5" exitCode=0 Jan 26 00:29:41 crc kubenswrapper[4975]: I0126 00:29:41.624683 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" event={"ID":"b76c31fb-14ea-4b49-8a41-0b2731967b86","Type":"ContainerDied","Data":"950074c352f727e22ecb9292041c52e5cee133377c9a49581baee4c6166e42f5"} Jan 26 00:29:41 crc kubenswrapper[4975]: I0126 00:29:41.625815 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" event={"ID":"b76c31fb-14ea-4b49-8a41-0b2731967b86","Type":"ContainerStarted","Data":"4fcb34786e631c61fc6a733cd4fad4abcb9508bd15cfc37254e429a1a83a2060"} Jan 26 00:29:41 crc kubenswrapper[4975]: I0126 00:29:41.625895 4975 scope.go:117] "RemoveContainer" containerID="2ec9837c4f02f5e5de7675625d8c048c082c725779a8095fa603812c7a7ca1e8" Jan 26 00:29:53 crc kubenswrapper[4975]: I0126 00:29:53.751230 4975 generic.go:334] "Generic (PLEG): container finished" podID="3a954d9d-e67b-4d74-ba84-047628ceee6e" containerID="af28a4d03fb97a7da58cbd47edf1d7ce2315bb8224372ececd69ecc9b9d66b00" exitCode=0 Jan 26 00:29:53 crc kubenswrapper[4975]: I0126 00:29:53.751491 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-tmxg4" event={"ID":"3a954d9d-e67b-4d74-ba84-047628ceee6e","Type":"ContainerDied","Data":"af28a4d03fb97a7da58cbd47edf1d7ce2315bb8224372ececd69ecc9b9d66b00"} Jan 26 00:29:53 crc kubenswrapper[4975]: I0126 00:29:53.753373 4975 scope.go:117] "RemoveContainer" containerID="af28a4d03fb97a7da58cbd47edf1d7ce2315bb8224372ececd69ecc9b9d66b00" Jan 26 00:29:54 crc kubenswrapper[4975]: I0126 00:29:54.763138 4975 generic.go:334] "Generic (PLEG): container finished" podID="3a954d9d-e67b-4d74-ba84-047628ceee6e" containerID="3e5bbfd1a7c0793fffcc464425a6fbf3e7d29f5b2d7cc0e2b0db16d95119bf68" exitCode=0 Jan 26 00:29:54 crc kubenswrapper[4975]: I0126 00:29:54.763281 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-tmxg4" event={"ID":"3a954d9d-e67b-4d74-ba84-047628ceee6e","Type":"ContainerDied","Data":"3e5bbfd1a7c0793fffcc464425a6fbf3e7d29f5b2d7cc0e2b0db16d95119bf68"} Jan 26 00:29:56 crc kubenswrapper[4975]: I0126 00:29:56.085456 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-tmxg4" Jan 26 00:29:56 crc kubenswrapper[4975]: I0126 00:29:56.198403 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-ceilometer-entrypoint-script\") pod \"3a954d9d-e67b-4d74-ba84-047628ceee6e\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " Jan 26 00:29:56 crc kubenswrapper[4975]: I0126 00:29:56.198508 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4glcv\" (UniqueName: \"kubernetes.io/projected/3a954d9d-e67b-4d74-ba84-047628ceee6e-kube-api-access-4glcv\") pod \"3a954d9d-e67b-4d74-ba84-047628ceee6e\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " Jan 26 00:29:56 crc kubenswrapper[4975]: I0126 00:29:56.198600 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-collectd-entrypoint-script\") pod \"3a954d9d-e67b-4d74-ba84-047628ceee6e\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " Jan 26 00:29:56 crc kubenswrapper[4975]: I0126 00:29:56.198650 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-collectd-config\") pod \"3a954d9d-e67b-4d74-ba84-047628ceee6e\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " Jan 26 00:29:56 crc kubenswrapper[4975]: I0126 00:29:56.198751 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-healthcheck-log\") pod \"3a954d9d-e67b-4d74-ba84-047628ceee6e\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " Jan 26 00:29:56 crc kubenswrapper[4975]: I0126 00:29:56.198773 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-sensubility-config\") pod \"3a954d9d-e67b-4d74-ba84-047628ceee6e\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " Jan 26 00:29:56 crc kubenswrapper[4975]: I0126 00:29:56.198793 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-ceilometer-publisher\") pod \"3a954d9d-e67b-4d74-ba84-047628ceee6e\" (UID: \"3a954d9d-e67b-4d74-ba84-047628ceee6e\") " Jan 26 00:29:56 crc kubenswrapper[4975]: I0126 00:29:56.206853 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a954d9d-e67b-4d74-ba84-047628ceee6e-kube-api-access-4glcv" (OuterVolumeSpecName: "kube-api-access-4glcv") pod "3a954d9d-e67b-4d74-ba84-047628ceee6e" (UID: "3a954d9d-e67b-4d74-ba84-047628ceee6e"). InnerVolumeSpecName "kube-api-access-4glcv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:29:56 crc kubenswrapper[4975]: I0126 00:29:56.220401 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-healthcheck-log" (OuterVolumeSpecName: "healthcheck-log") pod "3a954d9d-e67b-4d74-ba84-047628ceee6e" (UID: "3a954d9d-e67b-4d74-ba84-047628ceee6e"). InnerVolumeSpecName "healthcheck-log". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:29:56 crc kubenswrapper[4975]: I0126 00:29:56.223621 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-ceilometer-entrypoint-script" (OuterVolumeSpecName: "ceilometer-entrypoint-script") pod "3a954d9d-e67b-4d74-ba84-047628ceee6e" (UID: "3a954d9d-e67b-4d74-ba84-047628ceee6e"). InnerVolumeSpecName "ceilometer-entrypoint-script". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:29:56 crc kubenswrapper[4975]: I0126 00:29:56.223682 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-sensubility-config" (OuterVolumeSpecName: "sensubility-config") pod "3a954d9d-e67b-4d74-ba84-047628ceee6e" (UID: "3a954d9d-e67b-4d74-ba84-047628ceee6e"). InnerVolumeSpecName "sensubility-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:29:56 crc kubenswrapper[4975]: I0126 00:29:56.223993 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-collectd-config" (OuterVolumeSpecName: "collectd-config") pod "3a954d9d-e67b-4d74-ba84-047628ceee6e" (UID: "3a954d9d-e67b-4d74-ba84-047628ceee6e"). InnerVolumeSpecName "collectd-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:29:56 crc kubenswrapper[4975]: I0126 00:29:56.225048 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-ceilometer-publisher" (OuterVolumeSpecName: "ceilometer-publisher") pod "3a954d9d-e67b-4d74-ba84-047628ceee6e" (UID: "3a954d9d-e67b-4d74-ba84-047628ceee6e"). InnerVolumeSpecName "ceilometer-publisher". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:29:56 crc kubenswrapper[4975]: I0126 00:29:56.234295 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-collectd-entrypoint-script" (OuterVolumeSpecName: "collectd-entrypoint-script") pod "3a954d9d-e67b-4d74-ba84-047628ceee6e" (UID: "3a954d9d-e67b-4d74-ba84-047628ceee6e"). InnerVolumeSpecName "collectd-entrypoint-script". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:29:56 crc kubenswrapper[4975]: I0126 00:29:56.302542 4975 reconciler_common.go:293] "Volume detached for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-ceilometer-entrypoint-script\") on node \"crc\" DevicePath \"\"" Jan 26 00:29:56 crc kubenswrapper[4975]: I0126 00:29:56.302618 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4glcv\" (UniqueName: \"kubernetes.io/projected/3a954d9d-e67b-4d74-ba84-047628ceee6e-kube-api-access-4glcv\") on node \"crc\" DevicePath \"\"" Jan 26 00:29:56 crc kubenswrapper[4975]: I0126 00:29:56.302633 4975 reconciler_common.go:293] "Volume detached for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-collectd-entrypoint-script\") on node \"crc\" DevicePath \"\"" Jan 26 00:29:56 crc kubenswrapper[4975]: I0126 00:29:56.302648 4975 reconciler_common.go:293] "Volume detached for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-collectd-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:29:56 crc kubenswrapper[4975]: I0126 00:29:56.302661 4975 reconciler_common.go:293] "Volume detached for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-healthcheck-log\") on node \"crc\" DevicePath \"\"" Jan 26 00:29:56 crc kubenswrapper[4975]: I0126 00:29:56.302674 4975 reconciler_common.go:293] "Volume detached for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-sensubility-config\") on node \"crc\" DevicePath \"\"" Jan 26 00:29:56 crc kubenswrapper[4975]: I0126 00:29:56.302698 4975 reconciler_common.go:293] "Volume detached for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/3a954d9d-e67b-4d74-ba84-047628ceee6e-ceilometer-publisher\") on node \"crc\" DevicePath \"\"" Jan 26 00:29:56 crc kubenswrapper[4975]: I0126 00:29:56.794664 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-tmxg4" event={"ID":"3a954d9d-e67b-4d74-ba84-047628ceee6e","Type":"ContainerDied","Data":"475aa01c7aa0a15253eb853ced7c0322d2264f99d17243f5aa99d293304eb2ca"} Jan 26 00:29:56 crc kubenswrapper[4975]: I0126 00:29:56.794723 4975 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="475aa01c7aa0a15253eb853ced7c0322d2264f99d17243f5aa99d293304eb2ca" Jan 26 00:29:56 crc kubenswrapper[4975]: I0126 00:29:56.794891 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-tmxg4" Jan 26 00:29:58 crc kubenswrapper[4975]: I0126 00:29:58.014685 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_stf-smoketest-smoke1-dwrsq_c3e33d23-1e37-41be-9af2-e12fca999dbe/smoketest-collectd/0.log" Jan 26 00:29:58 crc kubenswrapper[4975]: I0126 00:29:58.285635 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_stf-smoketest-smoke1-dwrsq_c3e33d23-1e37-41be-9af2-e12fca999dbe/smoketest-ceilometer/0.log" Jan 26 00:29:58 crc kubenswrapper[4975]: I0126 00:29:58.536671 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-interconnect-68864d46cb-nkbzp_a6053679-c924-4974-a98a-1e7e7a333e4d/default-interconnect/0.log" Jan 26 00:29:58 crc kubenswrapper[4975]: I0126 00:29:58.809319 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j_0d7d25f6-4c23-4290-a03c-73691e6237a4/bridge/2.log" Jan 26 00:29:59 crc kubenswrapper[4975]: I0126 00:29:59.075066 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7996dc9458-9kb2j_0d7d25f6-4c23-4290-a03c-73691e6237a4/sg-core/0.log" Jan 26 00:29:59 crc kubenswrapper[4975]: I0126 00:29:59.360058 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7_90b625d1-1889-4c96-a8ea-1b5f60915c53/bridge/2.log" Jan 26 00:29:59 crc kubenswrapper[4975]: I0126 00:29:59.619339 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-event-smartgateway-6fdc755776-p5fl7_90b625d1-1889-4c96-a8ea-1b5f60915c53/sg-core/0.log" Jan 26 00:29:59 crc kubenswrapper[4975]: I0126 00:29:59.899867 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg_26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529/bridge/2.log" Jan 26 00:30:00 crc kubenswrapper[4975]: I0126 00:30:00.164781 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-b57f974ff-xklqg_26fdd3e0-b1fa-4c8a-984d-7e42b7ea6529/sg-core/0.log" Jan 26 00:30:00 crc kubenswrapper[4975]: I0126 00:30:00.170895 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29489790-hz9r7"] Jan 26 00:30:00 crc kubenswrapper[4975]: E0126 00:30:00.171313 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a954d9d-e67b-4d74-ba84-047628ceee6e" containerName="smoketest-collectd" Jan 26 00:30:00 crc kubenswrapper[4975]: I0126 00:30:00.171333 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a954d9d-e67b-4d74-ba84-047628ceee6e" containerName="smoketest-collectd" Jan 26 00:30:00 crc kubenswrapper[4975]: E0126 00:30:00.171355 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a954d9d-e67b-4d74-ba84-047628ceee6e" containerName="smoketest-ceilometer" Jan 26 00:30:00 crc kubenswrapper[4975]: I0126 00:30:00.171364 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a954d9d-e67b-4d74-ba84-047628ceee6e" containerName="smoketest-ceilometer" Jan 26 00:30:00 crc kubenswrapper[4975]: I0126 00:30:00.171562 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a954d9d-e67b-4d74-ba84-047628ceee6e" containerName="smoketest-collectd" Jan 26 00:30:00 crc 
kubenswrapper[4975]: I0126 00:30:00.171587 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a954d9d-e67b-4d74-ba84-047628ceee6e" containerName="smoketest-ceilometer" Jan 26 00:30:00 crc kubenswrapper[4975]: I0126 00:30:00.172437 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29489790-hz9r7" Jan 26 00:30:00 crc kubenswrapper[4975]: I0126 00:30:00.175385 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 26 00:30:00 crc kubenswrapper[4975]: I0126 00:30:00.175418 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 26 00:30:00 crc kubenswrapper[4975]: I0126 00:30:00.178393 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29489790-hz9r7"] Jan 26 00:30:00 crc kubenswrapper[4975]: I0126 00:30:00.275560 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/13f4ad42-4a5d-4686-990a-0c5491692c02-config-volume\") pod \"collect-profiles-29489790-hz9r7\" (UID: \"13f4ad42-4a5d-4686-990a-0c5491692c02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489790-hz9r7" Jan 26 00:30:00 crc kubenswrapper[4975]: I0126 00:30:00.275633 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/13f4ad42-4a5d-4686-990a-0c5491692c02-secret-volume\") pod \"collect-profiles-29489790-hz9r7\" (UID: \"13f4ad42-4a5d-4686-990a-0c5491692c02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489790-hz9r7" Jan 26 00:30:00 crc kubenswrapper[4975]: I0126 00:30:00.276141 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmmzk\" (UniqueName: \"kubernetes.io/projected/13f4ad42-4a5d-4686-990a-0c5491692c02-kube-api-access-lmmzk\") pod \"collect-profiles-29489790-hz9r7\" (UID: \"13f4ad42-4a5d-4686-990a-0c5491692c02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489790-hz9r7" Jan 26 00:30:00 crc kubenswrapper[4975]: I0126 00:30:00.378351 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmmzk\" (UniqueName: \"kubernetes.io/projected/13f4ad42-4a5d-4686-990a-0c5491692c02-kube-api-access-lmmzk\") pod \"collect-profiles-29489790-hz9r7\" (UID: \"13f4ad42-4a5d-4686-990a-0c5491692c02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489790-hz9r7" Jan 26 00:30:00 crc kubenswrapper[4975]: I0126 00:30:00.378450 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/13f4ad42-4a5d-4686-990a-0c5491692c02-config-volume\") pod \"collect-profiles-29489790-hz9r7\" (UID: \"13f4ad42-4a5d-4686-990a-0c5491692c02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489790-hz9r7" Jan 26 00:30:00 crc kubenswrapper[4975]: I0126 00:30:00.378473 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/13f4ad42-4a5d-4686-990a-0c5491692c02-secret-volume\") pod \"collect-profiles-29489790-hz9r7\" (UID: \"13f4ad42-4a5d-4686-990a-0c5491692c02\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29489790-hz9r7" Jan 26 00:30:00 crc kubenswrapper[4975]: I0126 00:30:00.380343 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/13f4ad42-4a5d-4686-990a-0c5491692c02-config-volume\") pod \"collect-profiles-29489790-hz9r7\" (UID: \"13f4ad42-4a5d-4686-990a-0c5491692c02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489790-hz9r7" Jan 26 00:30:00 crc kubenswrapper[4975]: I0126 00:30:00.386377 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/13f4ad42-4a5d-4686-990a-0c5491692c02-secret-volume\") pod \"collect-profiles-29489790-hz9r7\" (UID: \"13f4ad42-4a5d-4686-990a-0c5491692c02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489790-hz9r7" Jan 26 00:30:00 crc kubenswrapper[4975]: I0126 00:30:00.402210 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmmzk\" (UniqueName: \"kubernetes.io/projected/13f4ad42-4a5d-4686-990a-0c5491692c02-kube-api-access-lmmzk\") pod \"collect-profiles-29489790-hz9r7\" (UID: \"13f4ad42-4a5d-4686-990a-0c5491692c02\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29489790-hz9r7" Jan 26 00:30:00 crc kubenswrapper[4975]: I0126 00:30:00.418829 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq_51e86811-c442-451e-b8b3-1680edf110e4/bridge/2.log" Jan 26 00:30:00 crc kubenswrapper[4975]: I0126 00:30:00.526658 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29489790-hz9r7" Jan 26 00:30:00 crc kubenswrapper[4975]: I0126 00:30:00.686780 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-event-smartgateway-6d4df847bb-mxbzq_51e86811-c442-451e-b8b3-1680edf110e4/sg-core/0.log" Jan 26 00:30:00 crc kubenswrapper[4975]: I0126 00:30:00.781563 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29489790-hz9r7"] Jan 26 00:30:00 crc kubenswrapper[4975]: I0126 00:30:00.829526 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29489790-hz9r7" event={"ID":"13f4ad42-4a5d-4686-990a-0c5491692c02","Type":"ContainerStarted","Data":"eb09536bf246343a060227db0bb84168e4594fa913d9c9e021ea85435dead1e5"} Jan 26 00:30:00 crc kubenswrapper[4975]: I0126 00:30:00.957369 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s_5189d2e4-ce9c-4cb8-955a-22b1edde1b70/bridge/2.log" Jan 26 00:30:01 crc kubenswrapper[4975]: I0126 00:30:01.241719 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-6864f4fb65-jd25s_5189d2e4-ce9c-4cb8-955a-22b1edde1b70/sg-core/0.log" Jan 26 00:30:01 crc kubenswrapper[4975]: I0126 00:30:01.842303 4975 generic.go:334] "Generic (PLEG): container finished" podID="13f4ad42-4a5d-4686-990a-0c5491692c02" containerID="849f461efdc0c4f9c07d9f66961d014850621ddb2f338795f8517087996a8ff2" exitCode=0 Jan 26 00:30:01 crc kubenswrapper[4975]: I0126 00:30:01.842369 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29489790-hz9r7" 
event={"ID":"13f4ad42-4a5d-4686-990a-0c5491692c02","Type":"ContainerDied","Data":"849f461efdc0c4f9c07d9f66961d014850621ddb2f338795f8517087996a8ff2"} Jan 26 00:30:03 crc kubenswrapper[4975]: I0126 00:30:03.120354 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29489790-hz9r7" Jan 26 00:30:03 crc kubenswrapper[4975]: I0126 00:30:03.305913 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/13f4ad42-4a5d-4686-990a-0c5491692c02-config-volume\") pod \"13f4ad42-4a5d-4686-990a-0c5491692c02\" (UID: \"13f4ad42-4a5d-4686-990a-0c5491692c02\") " Jan 26 00:30:03 crc kubenswrapper[4975]: I0126 00:30:03.305970 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/13f4ad42-4a5d-4686-990a-0c5491692c02-secret-volume\") pod \"13f4ad42-4a5d-4686-990a-0c5491692c02\" (UID: \"13f4ad42-4a5d-4686-990a-0c5491692c02\") " Jan 26 00:30:03 crc kubenswrapper[4975]: I0126 00:30:03.306198 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lmmzk\" (UniqueName: \"kubernetes.io/projected/13f4ad42-4a5d-4686-990a-0c5491692c02-kube-api-access-lmmzk\") pod \"13f4ad42-4a5d-4686-990a-0c5491692c02\" (UID: \"13f4ad42-4a5d-4686-990a-0c5491692c02\") " Jan 26 00:30:03 crc kubenswrapper[4975]: I0126 00:30:03.307289 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13f4ad42-4a5d-4686-990a-0c5491692c02-config-volume" (OuterVolumeSpecName: "config-volume") pod "13f4ad42-4a5d-4686-990a-0c5491692c02" (UID: "13f4ad42-4a5d-4686-990a-0c5491692c02"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 26 00:30:03 crc kubenswrapper[4975]: I0126 00:30:03.316179 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13f4ad42-4a5d-4686-990a-0c5491692c02-kube-api-access-lmmzk" (OuterVolumeSpecName: "kube-api-access-lmmzk") pod "13f4ad42-4a5d-4686-990a-0c5491692c02" (UID: "13f4ad42-4a5d-4686-990a-0c5491692c02"). InnerVolumeSpecName "kube-api-access-lmmzk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:30:03 crc kubenswrapper[4975]: I0126 00:30:03.316986 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/13f4ad42-4a5d-4686-990a-0c5491692c02-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "13f4ad42-4a5d-4686-990a-0c5491692c02" (UID: "13f4ad42-4a5d-4686-990a-0c5491692c02"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 26 00:30:03 crc kubenswrapper[4975]: I0126 00:30:03.407840 4975 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/13f4ad42-4a5d-4686-990a-0c5491692c02-config-volume\") on node \"crc\" DevicePath \"\"" Jan 26 00:30:03 crc kubenswrapper[4975]: I0126 00:30:03.408263 4975 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/13f4ad42-4a5d-4686-990a-0c5491692c02-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 26 00:30:03 crc kubenswrapper[4975]: I0126 00:30:03.408356 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lmmzk\" (UniqueName: \"kubernetes.io/projected/13f4ad42-4a5d-4686-990a-0c5491692c02-kube-api-access-lmmzk\") on node \"crc\" DevicePath \"\"" Jan 26 00:30:03 crc kubenswrapper[4975]: I0126 00:30:03.859543 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29489790-hz9r7" event={"ID":"13f4ad42-4a5d-4686-990a-0c5491692c02","Type":"ContainerDied","Data":"eb09536bf246343a060227db0bb84168e4594fa913d9c9e021ea85435dead1e5"} Jan 26 00:30:03 crc kubenswrapper[4975]: I0126 00:30:03.859595 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29489790-hz9r7" Jan 26 00:30:03 crc kubenswrapper[4975]: I0126 00:30:03.859603 4975 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb09536bf246343a060227db0bb84168e4594fa913d9c9e021ea85435dead1e5" Jan 26 00:30:04 crc kubenswrapper[4975]: I0126 00:30:04.286011 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-bbbc889bc-4bswt_96b49c24-2f17-4321-a1ad-60db89870c43/operator/0.log" Jan 26 00:30:04 crc kubenswrapper[4975]: I0126 00:30:04.583886 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-default-0_a705e25a-ff6d-4954-8c70-ae6d973c3d0e/prometheus/0.log" Jan 26 00:30:04 crc kubenswrapper[4975]: I0126 00:30:04.963174 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_elasticsearch-es-default-0_9ca3c33e-c168-4aec-b194-821f5b3f3995/elasticsearch/0.log" Jan 26 00:30:05 crc kubenswrapper[4975]: I0126 00:30:05.278859 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-78bcbbdcff-nt9q7_cc95813a-9cba-437a-bce6-98252790efed/prometheus-webhook-snmp/0.log" Jan 26 00:30:05 crc kubenswrapper[4975]: I0126 00:30:05.557565 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_alertmanager-default-0_2ca91482-436e-48bf-8d3b-256ab164a837/alertmanager/0.log" Jan 26 00:30:19 crc kubenswrapper[4975]: I0126 00:30:19.433187 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-55b89ddfb9-zxp9z_9625ef50-fe72-40a4-bd92-c3c714a02ac7/operator/0.log" Jan 26 00:30:22 crc kubenswrapper[4975]: I0126 00:30:22.785227 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-bbbc889bc-4bswt_96b49c24-2f17-4321-a1ad-60db89870c43/operator/0.log" Jan 26 00:30:23 crc kubenswrapper[4975]: I0126 00:30:23.082018 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_qdr-test_30b7c47f-532b-4984-9858-0df6cfcf30d5/qdr/0.log" Jan 26 00:30:30 crc kubenswrapper[4975]: I0126 00:30:30.902580 
4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/infrawatch-operators-76r9n"] Jan 26 00:30:30 crc kubenswrapper[4975]: E0126 00:30:30.903771 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13f4ad42-4a5d-4686-990a-0c5491692c02" containerName="collect-profiles" Jan 26 00:30:30 crc kubenswrapper[4975]: I0126 00:30:30.903788 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="13f4ad42-4a5d-4686-990a-0c5491692c02" containerName="collect-profiles" Jan 26 00:30:30 crc kubenswrapper[4975]: I0126 00:30:30.903920 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="13f4ad42-4a5d-4686-990a-0c5491692c02" containerName="collect-profiles" Jan 26 00:30:30 crc kubenswrapper[4975]: I0126 00:30:30.904462 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-76r9n" Jan 26 00:30:30 crc kubenswrapper[4975]: I0126 00:30:30.919192 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-76r9n"] Jan 26 00:30:31 crc kubenswrapper[4975]: I0126 00:30:31.080160 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shxlc\" (UniqueName: \"kubernetes.io/projected/9c9f33c2-f94c-4df9-979a-c677c12d9617-kube-api-access-shxlc\") pod \"infrawatch-operators-76r9n\" (UID: \"9c9f33c2-f94c-4df9-979a-c677c12d9617\") " pod="service-telemetry/infrawatch-operators-76r9n" Jan 26 00:30:31 crc kubenswrapper[4975]: I0126 00:30:31.182338 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shxlc\" (UniqueName: \"kubernetes.io/projected/9c9f33c2-f94c-4df9-979a-c677c12d9617-kube-api-access-shxlc\") pod \"infrawatch-operators-76r9n\" (UID: \"9c9f33c2-f94c-4df9-979a-c677c12d9617\") " pod="service-telemetry/infrawatch-operators-76r9n" Jan 26 00:30:31 crc kubenswrapper[4975]: I0126 00:30:31.223296 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shxlc\" (UniqueName: \"kubernetes.io/projected/9c9f33c2-f94c-4df9-979a-c677c12d9617-kube-api-access-shxlc\") pod \"infrawatch-operators-76r9n\" (UID: \"9c9f33c2-f94c-4df9-979a-c677c12d9617\") " pod="service-telemetry/infrawatch-operators-76r9n" Jan 26 00:30:31 crc kubenswrapper[4975]: I0126 00:30:31.266238 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-76r9n" Jan 26 00:30:31 crc kubenswrapper[4975]: I0126 00:30:31.487194 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-76r9n"] Jan 26 00:30:32 crc kubenswrapper[4975]: I0126 00:30:32.110990 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-76r9n" event={"ID":"9c9f33c2-f94c-4df9-979a-c677c12d9617","Type":"ContainerStarted","Data":"45311871a7e11b708d06775a8af6a25234627be1b4a1b7f206fb63700fd0a0d4"} Jan 26 00:30:32 crc kubenswrapper[4975]: I0126 00:30:32.111094 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-76r9n" event={"ID":"9c9f33c2-f94c-4df9-979a-c677c12d9617","Type":"ContainerStarted","Data":"cbd4c08e5b5b5f6081e34e474ed5940dfce7eff97555b3c0dbb00520bfaaa625"} Jan 26 00:30:32 crc kubenswrapper[4975]: I0126 00:30:32.131924 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/infrawatch-operators-76r9n" podStartSLOduration=2.011926048 podStartE2EDuration="2.131905426s" podCreationTimestamp="2026-01-26 00:30:30 +0000 UTC" firstStartedPulling="2026-01-26 00:30:31.496388694 +0000 UTC m=+1415.617594188" lastFinishedPulling="2026-01-26 00:30:31.616368042 +0000 UTC m=+1415.737573566" observedRunningTime="2026-01-26 00:30:32.130790694 +0000 UTC m=+1416.251996208" watchObservedRunningTime="2026-01-26 00:30:32.131905426 +0000 UTC m=+1416.253110920" Jan 26 00:30:41 crc kubenswrapper[4975]: I0126 00:30:41.267883 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/infrawatch-operators-76r9n" Jan 26 00:30:41 crc kubenswrapper[4975]: I0126 00:30:41.268508 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/infrawatch-operators-76r9n" Jan 26 00:30:41 crc kubenswrapper[4975]: I0126 00:30:41.298683 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/infrawatch-operators-76r9n" Jan 26 00:30:42 crc kubenswrapper[4975]: I0126 00:30:42.274537 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/infrawatch-operators-76r9n" Jan 26 00:30:44 crc kubenswrapper[4975]: I0126 00:30:44.892686 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-76r9n"] Jan 26 00:30:44 crc kubenswrapper[4975]: I0126 00:30:44.893486 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/infrawatch-operators-76r9n" podUID="9c9f33c2-f94c-4df9-979a-c677c12d9617" containerName="registry-server" containerID="cri-o://45311871a7e11b708d06775a8af6a25234627be1b4a1b7f206fb63700fd0a0d4" gracePeriod=2 Jan 26 00:30:45 crc kubenswrapper[4975]: I0126 00:30:45.237433 4975 generic.go:334] "Generic (PLEG): container finished" podID="9c9f33c2-f94c-4df9-979a-c677c12d9617" containerID="45311871a7e11b708d06775a8af6a25234627be1b4a1b7f206fb63700fd0a0d4" exitCode=0 Jan 26 00:30:45 crc kubenswrapper[4975]: I0126 00:30:45.237989 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-76r9n" event={"ID":"9c9f33c2-f94c-4df9-979a-c677c12d9617","Type":"ContainerDied","Data":"45311871a7e11b708d06775a8af6a25234627be1b4a1b7f206fb63700fd0a0d4"} Jan 26 00:30:45 crc kubenswrapper[4975]: I0126 00:30:45.328156 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-76r9n" Jan 26 00:30:45 crc kubenswrapper[4975]: I0126 00:30:45.354892 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-shxlc\" (UniqueName: \"kubernetes.io/projected/9c9f33c2-f94c-4df9-979a-c677c12d9617-kube-api-access-shxlc\") pod \"9c9f33c2-f94c-4df9-979a-c677c12d9617\" (UID: \"9c9f33c2-f94c-4df9-979a-c677c12d9617\") " Jan 26 00:30:45 crc kubenswrapper[4975]: I0126 00:30:45.364114 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c9f33c2-f94c-4df9-979a-c677c12d9617-kube-api-access-shxlc" (OuterVolumeSpecName: "kube-api-access-shxlc") pod "9c9f33c2-f94c-4df9-979a-c677c12d9617" (UID: "9c9f33c2-f94c-4df9-979a-c677c12d9617"). InnerVolumeSpecName "kube-api-access-shxlc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:30:45 crc kubenswrapper[4975]: I0126 00:30:45.456975 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-shxlc\" (UniqueName: \"kubernetes.io/projected/9c9f33c2-f94c-4df9-979a-c677c12d9617-kube-api-access-shxlc\") on node \"crc\" DevicePath \"\"" Jan 26 00:30:46 crc kubenswrapper[4975]: I0126 00:30:46.249538 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-76r9n" event={"ID":"9c9f33c2-f94c-4df9-979a-c677c12d9617","Type":"ContainerDied","Data":"cbd4c08e5b5b5f6081e34e474ed5940dfce7eff97555b3c0dbb00520bfaaa625"} Jan 26 00:30:46 crc kubenswrapper[4975]: I0126 00:30:46.250222 4975 scope.go:117] "RemoveContainer" containerID="45311871a7e11b708d06775a8af6a25234627be1b4a1b7f206fb63700fd0a0d4" Jan 26 00:30:46 crc kubenswrapper[4975]: I0126 00:30:46.249780 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-76r9n" Jan 26 00:30:46 crc kubenswrapper[4975]: I0126 00:30:46.285053 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-76r9n"] Jan 26 00:30:46 crc kubenswrapper[4975]: I0126 00:30:46.292668 4975 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/infrawatch-operators-76r9n"] Jan 26 00:30:47 crc kubenswrapper[4975]: I0126 00:30:47.803955 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-nvgnd/must-gather-qnpzx"] Jan 26 00:30:47 crc kubenswrapper[4975]: E0126 00:30:47.804359 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c9f33c2-f94c-4df9-979a-c677c12d9617" containerName="registry-server" Jan 26 00:30:47 crc kubenswrapper[4975]: I0126 00:30:47.804372 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c9f33c2-f94c-4df9-979a-c677c12d9617" containerName="registry-server" Jan 26 00:30:47 crc kubenswrapper[4975]: I0126 00:30:47.804512 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c9f33c2-f94c-4df9-979a-c677c12d9617" containerName="registry-server" Jan 26 00:30:47 crc kubenswrapper[4975]: I0126 00:30:47.805266 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-nvgnd/must-gather-qnpzx" Jan 26 00:30:47 crc kubenswrapper[4975]: I0126 00:30:47.808895 4975 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-nvgnd"/"default-dockercfg-nbpqw" Jan 26 00:30:47 crc kubenswrapper[4975]: I0126 00:30:47.809669 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-nvgnd"/"openshift-service-ca.crt" Jan 26 00:30:47 crc kubenswrapper[4975]: I0126 00:30:47.811774 4975 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-nvgnd"/"kube-root-ca.crt" Jan 26 00:30:47 crc kubenswrapper[4975]: I0126 00:30:47.832463 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-nvgnd/must-gather-qnpzx"] Jan 26 00:30:47 crc kubenswrapper[4975]: I0126 00:30:47.895612 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnzj2\" (UniqueName: \"kubernetes.io/projected/ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a-kube-api-access-gnzj2\") pod \"must-gather-qnpzx\" (UID: \"ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a\") " pod="openshift-must-gather-nvgnd/must-gather-qnpzx" Jan 26 00:30:47 crc kubenswrapper[4975]: I0126 00:30:47.895947 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a-must-gather-output\") pod \"must-gather-qnpzx\" (UID: \"ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a\") " pod="openshift-must-gather-nvgnd/must-gather-qnpzx" Jan 26 00:30:47 crc kubenswrapper[4975]: I0126 00:30:47.997983 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnzj2\" (UniqueName: \"kubernetes.io/projected/ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a-kube-api-access-gnzj2\") pod \"must-gather-qnpzx\" (UID: \"ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a\") " pod="openshift-must-gather-nvgnd/must-gather-qnpzx" Jan 26 00:30:47 crc kubenswrapper[4975]: I0126 00:30:47.998113 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a-must-gather-output\") pod \"must-gather-qnpzx\" (UID: \"ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a\") " pod="openshift-must-gather-nvgnd/must-gather-qnpzx" Jan 26 00:30:47 crc kubenswrapper[4975]: I0126 00:30:47.998797 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a-must-gather-output\") pod \"must-gather-qnpzx\" (UID: \"ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a\") " pod="openshift-must-gather-nvgnd/must-gather-qnpzx" Jan 26 00:30:48 crc kubenswrapper[4975]: I0126 00:30:48.027363 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnzj2\" (UniqueName: \"kubernetes.io/projected/ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a-kube-api-access-gnzj2\") pod \"must-gather-qnpzx\" (UID: \"ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a\") " pod="openshift-must-gather-nvgnd/must-gather-qnpzx" Jan 26 00:30:48 crc kubenswrapper[4975]: I0126 00:30:48.125777 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-nvgnd/must-gather-qnpzx" Jan 26 00:30:48 crc kubenswrapper[4975]: I0126 00:30:48.156951 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c9f33c2-f94c-4df9-979a-c677c12d9617" path="/var/lib/kubelet/pods/9c9f33c2-f94c-4df9-979a-c677c12d9617/volumes" Jan 26 00:30:48 crc kubenswrapper[4975]: I0126 00:30:48.398835 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-nvgnd/must-gather-qnpzx"] Jan 26 00:30:49 crc kubenswrapper[4975]: I0126 00:30:49.294443 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nvgnd/must-gather-qnpzx" event={"ID":"ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a","Type":"ContainerStarted","Data":"62b9ac27a1566ed5194b045d5d5abc46bce8856d18064a08f16d1e2004e862a9"} Jan 26 00:30:55 crc kubenswrapper[4975]: I0126 00:30:55.500278 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-84zwl"] Jan 26 00:30:55 crc kubenswrapper[4975]: I0126 00:30:55.506030 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-84zwl" Jan 26 00:30:55 crc kubenswrapper[4975]: I0126 00:30:55.516607 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-84zwl"] Jan 26 00:30:55 crc kubenswrapper[4975]: I0126 00:30:55.555037 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbddk\" (UniqueName: \"kubernetes.io/projected/14333534-4a72-4694-bd91-e253c1cc8e63-kube-api-access-bbddk\") pod \"certified-operators-84zwl\" (UID: \"14333534-4a72-4694-bd91-e253c1cc8e63\") " pod="openshift-marketplace/certified-operators-84zwl" Jan 26 00:30:55 crc kubenswrapper[4975]: I0126 00:30:55.555122 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14333534-4a72-4694-bd91-e253c1cc8e63-utilities\") pod \"certified-operators-84zwl\" (UID: \"14333534-4a72-4694-bd91-e253c1cc8e63\") " pod="openshift-marketplace/certified-operators-84zwl" Jan 26 00:30:55 crc kubenswrapper[4975]: I0126 00:30:55.555274 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14333534-4a72-4694-bd91-e253c1cc8e63-catalog-content\") pod \"certified-operators-84zwl\" (UID: \"14333534-4a72-4694-bd91-e253c1cc8e63\") " pod="openshift-marketplace/certified-operators-84zwl" Jan 26 00:30:55 crc kubenswrapper[4975]: I0126 00:30:55.656295 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbddk\" (UniqueName: \"kubernetes.io/projected/14333534-4a72-4694-bd91-e253c1cc8e63-kube-api-access-bbddk\") pod \"certified-operators-84zwl\" (UID: \"14333534-4a72-4694-bd91-e253c1cc8e63\") " pod="openshift-marketplace/certified-operators-84zwl" Jan 26 00:30:55 crc kubenswrapper[4975]: I0126 00:30:55.656358 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14333534-4a72-4694-bd91-e253c1cc8e63-utilities\") pod \"certified-operators-84zwl\" (UID: \"14333534-4a72-4694-bd91-e253c1cc8e63\") " pod="openshift-marketplace/certified-operators-84zwl" Jan 26 00:30:55 crc kubenswrapper[4975]: I0126 00:30:55.656448 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14333534-4a72-4694-bd91-e253c1cc8e63-catalog-content\") pod \"certified-operators-84zwl\" (UID: \"14333534-4a72-4694-bd91-e253c1cc8e63\") " pod="openshift-marketplace/certified-operators-84zwl" Jan 26 00:30:55 crc kubenswrapper[4975]: I0126 00:30:55.656995 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14333534-4a72-4694-bd91-e253c1cc8e63-catalog-content\") pod \"certified-operators-84zwl\" (UID: \"14333534-4a72-4694-bd91-e253c1cc8e63\") " pod="openshift-marketplace/certified-operators-84zwl" Jan 26 00:30:55 crc kubenswrapper[4975]: I0126 00:30:55.657160 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14333534-4a72-4694-bd91-e253c1cc8e63-utilities\") pod \"certified-operators-84zwl\" (UID: \"14333534-4a72-4694-bd91-e253c1cc8e63\") " pod="openshift-marketplace/certified-operators-84zwl" Jan 26 00:30:55 crc kubenswrapper[4975]: I0126 00:30:55.683097 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbddk\" (UniqueName: \"kubernetes.io/projected/14333534-4a72-4694-bd91-e253c1cc8e63-kube-api-access-bbddk\") pod \"certified-operators-84zwl\" (UID: \"14333534-4a72-4694-bd91-e253c1cc8e63\") " pod="openshift-marketplace/certified-operators-84zwl" Jan 26 00:30:55 crc kubenswrapper[4975]: I0126 00:30:55.692615 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wz8ww"] Jan 26 00:30:55 crc kubenswrapper[4975]: I0126 00:30:55.694345 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wz8ww" Jan 26 00:30:55 crc kubenswrapper[4975]: I0126 00:30:55.713207 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wz8ww"] Jan 26 00:30:55 crc kubenswrapper[4975]: I0126 00:30:55.757913 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b25b5ae-cd82-4ff1-9c89-08464c53180b-catalog-content\") pod \"redhat-operators-wz8ww\" (UID: \"3b25b5ae-cd82-4ff1-9c89-08464c53180b\") " pod="openshift-marketplace/redhat-operators-wz8ww" Jan 26 00:30:55 crc kubenswrapper[4975]: I0126 00:30:55.758445 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b25b5ae-cd82-4ff1-9c89-08464c53180b-utilities\") pod \"redhat-operators-wz8ww\" (UID: \"3b25b5ae-cd82-4ff1-9c89-08464c53180b\") " pod="openshift-marketplace/redhat-operators-wz8ww" Jan 26 00:30:55 crc kubenswrapper[4975]: I0126 00:30:55.758617 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sfkds\" (UniqueName: \"kubernetes.io/projected/3b25b5ae-cd82-4ff1-9c89-08464c53180b-kube-api-access-sfkds\") pod \"redhat-operators-wz8ww\" (UID: \"3b25b5ae-cd82-4ff1-9c89-08464c53180b\") " pod="openshift-marketplace/redhat-operators-wz8ww" Jan 26 00:30:55 crc kubenswrapper[4975]: I0126 00:30:55.837862 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-84zwl" Jan 26 00:30:55 crc kubenswrapper[4975]: I0126 00:30:55.860652 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b25b5ae-cd82-4ff1-9c89-08464c53180b-utilities\") pod \"redhat-operators-wz8ww\" (UID: \"3b25b5ae-cd82-4ff1-9c89-08464c53180b\") " pod="openshift-marketplace/redhat-operators-wz8ww" Jan 26 00:30:55 crc kubenswrapper[4975]: I0126 00:30:55.860778 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sfkds\" (UniqueName: \"kubernetes.io/projected/3b25b5ae-cd82-4ff1-9c89-08464c53180b-kube-api-access-sfkds\") pod \"redhat-operators-wz8ww\" (UID: \"3b25b5ae-cd82-4ff1-9c89-08464c53180b\") " pod="openshift-marketplace/redhat-operators-wz8ww" Jan 26 00:30:55 crc kubenswrapper[4975]: I0126 00:30:55.860868 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b25b5ae-cd82-4ff1-9c89-08464c53180b-catalog-content\") pod \"redhat-operators-wz8ww\" (UID: \"3b25b5ae-cd82-4ff1-9c89-08464c53180b\") " pod="openshift-marketplace/redhat-operators-wz8ww" Jan 26 00:30:55 crc kubenswrapper[4975]: I0126 00:30:55.862138 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b25b5ae-cd82-4ff1-9c89-08464c53180b-utilities\") pod \"redhat-operators-wz8ww\" (UID: \"3b25b5ae-cd82-4ff1-9c89-08464c53180b\") " pod="openshift-marketplace/redhat-operators-wz8ww" Jan 26 00:30:55 crc kubenswrapper[4975]: I0126 00:30:55.862204 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b25b5ae-cd82-4ff1-9c89-08464c53180b-catalog-content\") pod \"redhat-operators-wz8ww\" (UID: \"3b25b5ae-cd82-4ff1-9c89-08464c53180b\") " pod="openshift-marketplace/redhat-operators-wz8ww" Jan 26 00:30:55 crc kubenswrapper[4975]: I0126 00:30:55.898678 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sfkds\" (UniqueName: \"kubernetes.io/projected/3b25b5ae-cd82-4ff1-9c89-08464c53180b-kube-api-access-sfkds\") pod \"redhat-operators-wz8ww\" (UID: \"3b25b5ae-cd82-4ff1-9c89-08464c53180b\") " pod="openshift-marketplace/redhat-operators-wz8ww" Jan 26 00:30:56 crc kubenswrapper[4975]: I0126 00:30:56.030425 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wz8ww" Jan 26 00:30:57 crc kubenswrapper[4975]: I0126 00:30:57.298187 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wz8ww"] Jan 26 00:30:57 crc kubenswrapper[4975]: W0126 00:30:57.318209 4975 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b25b5ae_cd82_4ff1_9c89_08464c53180b.slice/crio-f8145aba5254801204713c1bcc03ab1cb44aa111e02d711aeda9acecdf9f8cbe WatchSource:0}: Error finding container f8145aba5254801204713c1bcc03ab1cb44aa111e02d711aeda9acecdf9f8cbe: Status 404 returned error can't find the container with id f8145aba5254801204713c1bcc03ab1cb44aa111e02d711aeda9acecdf9f8cbe Jan 26 00:30:57 crc kubenswrapper[4975]: I0126 00:30:57.387339 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wz8ww" event={"ID":"3b25b5ae-cd82-4ff1-9c89-08464c53180b","Type":"ContainerStarted","Data":"f8145aba5254801204713c1bcc03ab1cb44aa111e02d711aeda9acecdf9f8cbe"} Jan 26 00:30:57 crc kubenswrapper[4975]: I0126 00:30:57.393922 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nvgnd/must-gather-qnpzx" event={"ID":"ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a","Type":"ContainerStarted","Data":"8016cb4de5d33a349678ea6e7049461e530750f12da1683fbead9a8eb8473234"} Jan 26 00:30:57 crc kubenswrapper[4975]: I0126 00:30:57.523396 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-84zwl"] Jan 26 00:30:58 crc kubenswrapper[4975]: I0126 00:30:58.403602 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nvgnd/must-gather-qnpzx" event={"ID":"ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a","Type":"ContainerStarted","Data":"c82a3de702ce97c24e45849f239fbdad1e8a78d3b3cc42af7efea41a6c797d83"} Jan 26 00:30:58 crc kubenswrapper[4975]: I0126 00:30:58.405179 4975 generic.go:334] "Generic (PLEG): container finished" podID="14333534-4a72-4694-bd91-e253c1cc8e63" containerID="74ac13f4f4c01a297e49d513b8733fd36d9659490b885754b9d98bc099bf93a2" exitCode=0 Jan 26 00:30:58 crc kubenswrapper[4975]: I0126 00:30:58.405248 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-84zwl" event={"ID":"14333534-4a72-4694-bd91-e253c1cc8e63","Type":"ContainerDied","Data":"74ac13f4f4c01a297e49d513b8733fd36d9659490b885754b9d98bc099bf93a2"} Jan 26 00:30:58 crc kubenswrapper[4975]: I0126 00:30:58.405286 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-84zwl" event={"ID":"14333534-4a72-4694-bd91-e253c1cc8e63","Type":"ContainerStarted","Data":"4470f5b45ac4bf73f60a70f620fa9df41698589faeb7a0e6a4846c2cb23aa060"} Jan 26 00:30:58 crc kubenswrapper[4975]: I0126 00:30:58.406474 4975 generic.go:334] "Generic (PLEG): container finished" podID="3b25b5ae-cd82-4ff1-9c89-08464c53180b" containerID="07fa949c5a79991aa0a588fd77b46a9454c8f7b238e5e7ccad8cf8b0ea37864e" exitCode=0 Jan 26 00:30:58 crc kubenswrapper[4975]: I0126 00:30:58.406519 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wz8ww" event={"ID":"3b25b5ae-cd82-4ff1-9c89-08464c53180b","Type":"ContainerDied","Data":"07fa949c5a79991aa0a588fd77b46a9454c8f7b238e5e7ccad8cf8b0ea37864e"} Jan 26 00:30:58 crc kubenswrapper[4975]: I0126 00:30:58.428725 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-must-gather-nvgnd/must-gather-qnpzx" podStartSLOduration=2.869144769 podStartE2EDuration="11.428658118s" podCreationTimestamp="2026-01-26 00:30:47 +0000 UTC" firstStartedPulling="2026-01-26 00:30:48.415018552 +0000 UTC m=+1432.536224046" lastFinishedPulling="2026-01-26 00:30:56.974531901 +0000 UTC m=+1441.095737395" observedRunningTime="2026-01-26 00:30:58.41920431 +0000 UTC m=+1442.540409814" watchObservedRunningTime="2026-01-26 00:30:58.428658118 +0000 UTC m=+1442.549863612" Jan 26 00:30:59 crc kubenswrapper[4975]: I0126 00:30:59.415398 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-84zwl" event={"ID":"14333534-4a72-4694-bd91-e253c1cc8e63","Type":"ContainerStarted","Data":"63fc1b029d772cb7dfd3f491f8ca539962ae7cef29d8e2ea1ff8d8367fafa655"} Jan 26 00:30:59 crc kubenswrapper[4975]: I0126 00:30:59.418506 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wz8ww" event={"ID":"3b25b5ae-cd82-4ff1-9c89-08464c53180b","Type":"ContainerStarted","Data":"205450eb7a7a509450ab1df29490ddf958f371427fe10b6310fa90a265b21cb1"} Jan 26 00:31:00 crc kubenswrapper[4975]: I0126 00:31:00.429133 4975 generic.go:334] "Generic (PLEG): container finished" podID="14333534-4a72-4694-bd91-e253c1cc8e63" containerID="63fc1b029d772cb7dfd3f491f8ca539962ae7cef29d8e2ea1ff8d8367fafa655" exitCode=0 Jan 26 00:31:00 crc kubenswrapper[4975]: I0126 00:31:00.429250 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-84zwl" event={"ID":"14333534-4a72-4694-bd91-e253c1cc8e63","Type":"ContainerDied","Data":"63fc1b029d772cb7dfd3f491f8ca539962ae7cef29d8e2ea1ff8d8367fafa655"} Jan 26 00:31:01 crc kubenswrapper[4975]: I0126 00:31:01.440803 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-84zwl" event={"ID":"14333534-4a72-4694-bd91-e253c1cc8e63","Type":"ContainerStarted","Data":"e8510661590e77a1a465ba32ce225139482df7fc10ef5933643e639fbdcc6fa5"} Jan 26 00:31:01 crc kubenswrapper[4975]: I0126 00:31:01.443432 4975 generic.go:334] "Generic (PLEG): container finished" podID="3b25b5ae-cd82-4ff1-9c89-08464c53180b" containerID="205450eb7a7a509450ab1df29490ddf958f371427fe10b6310fa90a265b21cb1" exitCode=0 Jan 26 00:31:01 crc kubenswrapper[4975]: I0126 00:31:01.443535 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wz8ww" event={"ID":"3b25b5ae-cd82-4ff1-9c89-08464c53180b","Type":"ContainerDied","Data":"205450eb7a7a509450ab1df29490ddf958f371427fe10b6310fa90a265b21cb1"} Jan 26 00:31:01 crc kubenswrapper[4975]: I0126 00:31:01.462545 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-84zwl" podStartSLOduration=3.919168045 podStartE2EDuration="6.462520379s" podCreationTimestamp="2026-01-26 00:30:55 +0000 UTC" firstStartedPulling="2026-01-26 00:30:58.408045984 +0000 UTC m=+1442.529251478" lastFinishedPulling="2026-01-26 00:31:00.951398318 +0000 UTC m=+1445.072603812" observedRunningTime="2026-01-26 00:31:01.46147826 +0000 UTC m=+1445.582683764" watchObservedRunningTime="2026-01-26 00:31:01.462520379 +0000 UTC m=+1445.583725873" Jan 26 00:31:03 crc kubenswrapper[4975]: I0126 00:31:03.477343 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wz8ww" 
event={"ID":"3b25b5ae-cd82-4ff1-9c89-08464c53180b","Type":"ContainerStarted","Data":"1fee0a5f9e3609a33f6b84568cbe82b4917ecaff88c40c735b13bf944595b082"} Jan 26 00:31:03 crc kubenswrapper[4975]: I0126 00:31:03.504915 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-wz8ww" podStartSLOduration=4.934100129 podStartE2EDuration="8.504898621s" podCreationTimestamp="2026-01-26 00:30:55 +0000 UTC" firstStartedPulling="2026-01-26 00:30:58.408399644 +0000 UTC m=+1442.529605138" lastFinishedPulling="2026-01-26 00:31:01.979198136 +0000 UTC m=+1446.100403630" observedRunningTime="2026-01-26 00:31:03.501765372 +0000 UTC m=+1447.622970886" watchObservedRunningTime="2026-01-26 00:31:03.504898621 +0000 UTC m=+1447.626104115" Jan 26 00:31:05 crc kubenswrapper[4975]: I0126 00:31:05.838676 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-84zwl" Jan 26 00:31:05 crc kubenswrapper[4975]: I0126 00:31:05.838971 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-84zwl" Jan 26 00:31:05 crc kubenswrapper[4975]: I0126 00:31:05.889526 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-84zwl" Jan 26 00:31:06 crc kubenswrapper[4975]: I0126 00:31:06.030823 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-wz8ww" Jan 26 00:31:06 crc kubenswrapper[4975]: I0126 00:31:06.030911 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wz8ww" Jan 26 00:31:06 crc kubenswrapper[4975]: I0126 00:31:06.540490 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-84zwl" Jan 26 00:31:06 crc kubenswrapper[4975]: I0126 00:31:06.878562 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-84zwl"] Jan 26 00:31:07 crc kubenswrapper[4975]: I0126 00:31:07.075640 4975 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-wz8ww" podUID="3b25b5ae-cd82-4ff1-9c89-08464c53180b" containerName="registry-server" probeResult="failure" output=< Jan 26 00:31:07 crc kubenswrapper[4975]: timeout: failed to connect service ":50051" within 1s Jan 26 00:31:07 crc kubenswrapper[4975]: > Jan 26 00:31:08 crc kubenswrapper[4975]: I0126 00:31:08.526512 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-84zwl" podUID="14333534-4a72-4694-bd91-e253c1cc8e63" containerName="registry-server" containerID="cri-o://e8510661590e77a1a465ba32ce225139482df7fc10ef5933643e639fbdcc6fa5" gracePeriod=2 Jan 26 00:31:08 crc kubenswrapper[4975]: I0126 00:31:08.930905 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-84zwl" Jan 26 00:31:09 crc kubenswrapper[4975]: I0126 00:31:09.105007 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbddk\" (UniqueName: \"kubernetes.io/projected/14333534-4a72-4694-bd91-e253c1cc8e63-kube-api-access-bbddk\") pod \"14333534-4a72-4694-bd91-e253c1cc8e63\" (UID: \"14333534-4a72-4694-bd91-e253c1cc8e63\") " Jan 26 00:31:09 crc kubenswrapper[4975]: I0126 00:31:09.105103 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14333534-4a72-4694-bd91-e253c1cc8e63-utilities\") pod \"14333534-4a72-4694-bd91-e253c1cc8e63\" (UID: \"14333534-4a72-4694-bd91-e253c1cc8e63\") " Jan 26 00:31:09 crc kubenswrapper[4975]: I0126 00:31:09.105194 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14333534-4a72-4694-bd91-e253c1cc8e63-catalog-content\") pod \"14333534-4a72-4694-bd91-e253c1cc8e63\" (UID: \"14333534-4a72-4694-bd91-e253c1cc8e63\") " Jan 26 00:31:09 crc kubenswrapper[4975]: I0126 00:31:09.106340 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14333534-4a72-4694-bd91-e253c1cc8e63-utilities" (OuterVolumeSpecName: "utilities") pod "14333534-4a72-4694-bd91-e253c1cc8e63" (UID: "14333534-4a72-4694-bd91-e253c1cc8e63"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:31:09 crc kubenswrapper[4975]: I0126 00:31:09.112972 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14333534-4a72-4694-bd91-e253c1cc8e63-kube-api-access-bbddk" (OuterVolumeSpecName: "kube-api-access-bbddk") pod "14333534-4a72-4694-bd91-e253c1cc8e63" (UID: "14333534-4a72-4694-bd91-e253c1cc8e63"). InnerVolumeSpecName "kube-api-access-bbddk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:31:09 crc kubenswrapper[4975]: I0126 00:31:09.149985 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/14333534-4a72-4694-bd91-e253c1cc8e63-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "14333534-4a72-4694-bd91-e253c1cc8e63" (UID: "14333534-4a72-4694-bd91-e253c1cc8e63"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:31:09 crc kubenswrapper[4975]: I0126 00:31:09.212847 4975 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/14333534-4a72-4694-bd91-e253c1cc8e63-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 00:31:09 crc kubenswrapper[4975]: I0126 00:31:09.212900 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbddk\" (UniqueName: \"kubernetes.io/projected/14333534-4a72-4694-bd91-e253c1cc8e63-kube-api-access-bbddk\") on node \"crc\" DevicePath \"\"" Jan 26 00:31:09 crc kubenswrapper[4975]: I0126 00:31:09.212915 4975 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/14333534-4a72-4694-bd91-e253c1cc8e63-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 00:31:09 crc kubenswrapper[4975]: I0126 00:31:09.540503 4975 generic.go:334] "Generic (PLEG): container finished" podID="14333534-4a72-4694-bd91-e253c1cc8e63" containerID="e8510661590e77a1a465ba32ce225139482df7fc10ef5933643e639fbdcc6fa5" exitCode=0 Jan 26 00:31:09 crc kubenswrapper[4975]: I0126 00:31:09.540600 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-84zwl" event={"ID":"14333534-4a72-4694-bd91-e253c1cc8e63","Type":"ContainerDied","Data":"e8510661590e77a1a465ba32ce225139482df7fc10ef5933643e639fbdcc6fa5"} Jan 26 00:31:09 crc kubenswrapper[4975]: I0126 00:31:09.540669 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-84zwl" event={"ID":"14333534-4a72-4694-bd91-e253c1cc8e63","Type":"ContainerDied","Data":"4470f5b45ac4bf73f60a70f620fa9df41698589faeb7a0e6a4846c2cb23aa060"} Jan 26 00:31:09 crc kubenswrapper[4975]: I0126 00:31:09.540660 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-84zwl" Jan 26 00:31:09 crc kubenswrapper[4975]: I0126 00:31:09.540700 4975 scope.go:117] "RemoveContainer" containerID="e8510661590e77a1a465ba32ce225139482df7fc10ef5933643e639fbdcc6fa5" Jan 26 00:31:09 crc kubenswrapper[4975]: I0126 00:31:09.577355 4975 scope.go:117] "RemoveContainer" containerID="63fc1b029d772cb7dfd3f491f8ca539962ae7cef29d8e2ea1ff8d8367fafa655" Jan 26 00:31:09 crc kubenswrapper[4975]: I0126 00:31:09.602294 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-84zwl"] Jan 26 00:31:09 crc kubenswrapper[4975]: I0126 00:31:09.608197 4975 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-84zwl"] Jan 26 00:31:09 crc kubenswrapper[4975]: I0126 00:31:09.610171 4975 scope.go:117] "RemoveContainer" containerID="74ac13f4f4c01a297e49d513b8733fd36d9659490b885754b9d98bc099bf93a2" Jan 26 00:31:09 crc kubenswrapper[4975]: I0126 00:31:09.641470 4975 scope.go:117] "RemoveContainer" containerID="e8510661590e77a1a465ba32ce225139482df7fc10ef5933643e639fbdcc6fa5" Jan 26 00:31:09 crc kubenswrapper[4975]: E0126 00:31:09.641889 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8510661590e77a1a465ba32ce225139482df7fc10ef5933643e639fbdcc6fa5\": container with ID starting with e8510661590e77a1a465ba32ce225139482df7fc10ef5933643e639fbdcc6fa5 not found: ID does not exist" containerID="e8510661590e77a1a465ba32ce225139482df7fc10ef5933643e639fbdcc6fa5" Jan 26 00:31:09 crc kubenswrapper[4975]: I0126 00:31:09.641921 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8510661590e77a1a465ba32ce225139482df7fc10ef5933643e639fbdcc6fa5"} err="failed to get container status \"e8510661590e77a1a465ba32ce225139482df7fc10ef5933643e639fbdcc6fa5\": rpc error: code = NotFound desc = could not find container \"e8510661590e77a1a465ba32ce225139482df7fc10ef5933643e639fbdcc6fa5\": container with ID starting with e8510661590e77a1a465ba32ce225139482df7fc10ef5933643e639fbdcc6fa5 not found: ID does not exist" Jan 26 00:31:09 crc kubenswrapper[4975]: I0126 00:31:09.641944 4975 scope.go:117] "RemoveContainer" containerID="63fc1b029d772cb7dfd3f491f8ca539962ae7cef29d8e2ea1ff8d8367fafa655" Jan 26 00:31:09 crc kubenswrapper[4975]: E0126 00:31:09.642148 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"63fc1b029d772cb7dfd3f491f8ca539962ae7cef29d8e2ea1ff8d8367fafa655\": container with ID starting with 63fc1b029d772cb7dfd3f491f8ca539962ae7cef29d8e2ea1ff8d8367fafa655 not found: ID does not exist" containerID="63fc1b029d772cb7dfd3f491f8ca539962ae7cef29d8e2ea1ff8d8367fafa655" Jan 26 00:31:09 crc kubenswrapper[4975]: I0126 00:31:09.642169 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"63fc1b029d772cb7dfd3f491f8ca539962ae7cef29d8e2ea1ff8d8367fafa655"} err="failed to get container status \"63fc1b029d772cb7dfd3f491f8ca539962ae7cef29d8e2ea1ff8d8367fafa655\": rpc error: code = NotFound desc = could not find container \"63fc1b029d772cb7dfd3f491f8ca539962ae7cef29d8e2ea1ff8d8367fafa655\": container with ID starting with 63fc1b029d772cb7dfd3f491f8ca539962ae7cef29d8e2ea1ff8d8367fafa655 not found: ID does not exist" Jan 26 00:31:09 crc kubenswrapper[4975]: I0126 00:31:09.642183 4975 scope.go:117] "RemoveContainer" 
containerID="74ac13f4f4c01a297e49d513b8733fd36d9659490b885754b9d98bc099bf93a2" Jan 26 00:31:09 crc kubenswrapper[4975]: E0126 00:31:09.642381 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74ac13f4f4c01a297e49d513b8733fd36d9659490b885754b9d98bc099bf93a2\": container with ID starting with 74ac13f4f4c01a297e49d513b8733fd36d9659490b885754b9d98bc099bf93a2 not found: ID does not exist" containerID="74ac13f4f4c01a297e49d513b8733fd36d9659490b885754b9d98bc099bf93a2" Jan 26 00:31:09 crc kubenswrapper[4975]: I0126 00:31:09.642400 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74ac13f4f4c01a297e49d513b8733fd36d9659490b885754b9d98bc099bf93a2"} err="failed to get container status \"74ac13f4f4c01a297e49d513b8733fd36d9659490b885754b9d98bc099bf93a2\": rpc error: code = NotFound desc = could not find container \"74ac13f4f4c01a297e49d513b8733fd36d9659490b885754b9d98bc099bf93a2\": container with ID starting with 74ac13f4f4c01a297e49d513b8733fd36d9659490b885754b9d98bc099bf93a2 not found: ID does not exist" Jan 26 00:31:10 crc kubenswrapper[4975]: I0126 00:31:10.155980 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14333534-4a72-4694-bd91-e253c1cc8e63" path="/var/lib/kubelet/pods/14333534-4a72-4694-bd91-e253c1cc8e63/volumes" Jan 26 00:31:16 crc kubenswrapper[4975]: I0126 00:31:16.075255 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wz8ww" Jan 26 00:31:16 crc kubenswrapper[4975]: I0126 00:31:16.122009 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wz8ww" Jan 26 00:31:16 crc kubenswrapper[4975]: I0126 00:31:16.312075 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wz8ww"] Jan 26 00:31:17 crc kubenswrapper[4975]: I0126 00:31:17.599577 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-wz8ww" podUID="3b25b5ae-cd82-4ff1-9c89-08464c53180b" containerName="registry-server" containerID="cri-o://1fee0a5f9e3609a33f6b84568cbe82b4917ecaff88c40c735b13bf944595b082" gracePeriod=2 Jan 26 00:31:17 crc kubenswrapper[4975]: I0126 00:31:17.984064 4975 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wz8ww" Jan 26 00:31:18 crc kubenswrapper[4975]: I0126 00:31:18.176767 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b25b5ae-cd82-4ff1-9c89-08464c53180b-utilities\") pod \"3b25b5ae-cd82-4ff1-9c89-08464c53180b\" (UID: \"3b25b5ae-cd82-4ff1-9c89-08464c53180b\") " Jan 26 00:31:18 crc kubenswrapper[4975]: I0126 00:31:18.177114 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sfkds\" (UniqueName: \"kubernetes.io/projected/3b25b5ae-cd82-4ff1-9c89-08464c53180b-kube-api-access-sfkds\") pod \"3b25b5ae-cd82-4ff1-9c89-08464c53180b\" (UID: \"3b25b5ae-cd82-4ff1-9c89-08464c53180b\") " Jan 26 00:31:18 crc kubenswrapper[4975]: I0126 00:31:18.177232 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b25b5ae-cd82-4ff1-9c89-08464c53180b-catalog-content\") pod \"3b25b5ae-cd82-4ff1-9c89-08464c53180b\" (UID: \"3b25b5ae-cd82-4ff1-9c89-08464c53180b\") " Jan 26 00:31:18 crc kubenswrapper[4975]: I0126 00:31:18.187926 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b25b5ae-cd82-4ff1-9c89-08464c53180b-kube-api-access-sfkds" (OuterVolumeSpecName: "kube-api-access-sfkds") pod "3b25b5ae-cd82-4ff1-9c89-08464c53180b" (UID: "3b25b5ae-cd82-4ff1-9c89-08464c53180b"). InnerVolumeSpecName "kube-api-access-sfkds". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:31:18 crc kubenswrapper[4975]: I0126 00:31:18.188422 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b25b5ae-cd82-4ff1-9c89-08464c53180b-utilities" (OuterVolumeSpecName: "utilities") pod "3b25b5ae-cd82-4ff1-9c89-08464c53180b" (UID: "3b25b5ae-cd82-4ff1-9c89-08464c53180b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:31:18 crc kubenswrapper[4975]: I0126 00:31:18.278830 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sfkds\" (UniqueName: \"kubernetes.io/projected/3b25b5ae-cd82-4ff1-9c89-08464c53180b-kube-api-access-sfkds\") on node \"crc\" DevicePath \"\"" Jan 26 00:31:18 crc kubenswrapper[4975]: I0126 00:31:18.279411 4975 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b25b5ae-cd82-4ff1-9c89-08464c53180b-utilities\") on node \"crc\" DevicePath \"\"" Jan 26 00:31:18 crc kubenswrapper[4975]: I0126 00:31:18.306042 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3b25b5ae-cd82-4ff1-9c89-08464c53180b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3b25b5ae-cd82-4ff1-9c89-08464c53180b" (UID: "3b25b5ae-cd82-4ff1-9c89-08464c53180b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:31:18 crc kubenswrapper[4975]: I0126 00:31:18.390235 4975 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b25b5ae-cd82-4ff1-9c89-08464c53180b-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 26 00:31:18 crc kubenswrapper[4975]: I0126 00:31:18.608944 4975 generic.go:334] "Generic (PLEG): container finished" podID="3b25b5ae-cd82-4ff1-9c89-08464c53180b" containerID="1fee0a5f9e3609a33f6b84568cbe82b4917ecaff88c40c735b13bf944595b082" exitCode=0 Jan 26 00:31:18 crc kubenswrapper[4975]: I0126 00:31:18.609021 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wz8ww" event={"ID":"3b25b5ae-cd82-4ff1-9c89-08464c53180b","Type":"ContainerDied","Data":"1fee0a5f9e3609a33f6b84568cbe82b4917ecaff88c40c735b13bf944595b082"} Jan 26 00:31:18 crc kubenswrapper[4975]: I0126 00:31:18.609530 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wz8ww" event={"ID":"3b25b5ae-cd82-4ff1-9c89-08464c53180b","Type":"ContainerDied","Data":"f8145aba5254801204713c1bcc03ab1cb44aa111e02d711aeda9acecdf9f8cbe"} Jan 26 00:31:18 crc kubenswrapper[4975]: I0126 00:31:18.609566 4975 scope.go:117] "RemoveContainer" containerID="1fee0a5f9e3609a33f6b84568cbe82b4917ecaff88c40c735b13bf944595b082" Jan 26 00:31:18 crc kubenswrapper[4975]: I0126 00:31:18.609072 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wz8ww" Jan 26 00:31:18 crc kubenswrapper[4975]: I0126 00:31:18.629498 4975 scope.go:117] "RemoveContainer" containerID="205450eb7a7a509450ab1df29490ddf958f371427fe10b6310fa90a265b21cb1" Jan 26 00:31:18 crc kubenswrapper[4975]: I0126 00:31:18.654306 4975 scope.go:117] "RemoveContainer" containerID="07fa949c5a79991aa0a588fd77b46a9454c8f7b238e5e7ccad8cf8b0ea37864e" Jan 26 00:31:18 crc kubenswrapper[4975]: I0126 00:31:18.692665 4975 scope.go:117] "RemoveContainer" containerID="1fee0a5f9e3609a33f6b84568cbe82b4917ecaff88c40c735b13bf944595b082" Jan 26 00:31:18 crc kubenswrapper[4975]: E0126 00:31:18.693247 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1fee0a5f9e3609a33f6b84568cbe82b4917ecaff88c40c735b13bf944595b082\": container with ID starting with 1fee0a5f9e3609a33f6b84568cbe82b4917ecaff88c40c735b13bf944595b082 not found: ID does not exist" containerID="1fee0a5f9e3609a33f6b84568cbe82b4917ecaff88c40c735b13bf944595b082" Jan 26 00:31:18 crc kubenswrapper[4975]: I0126 00:31:18.693291 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1fee0a5f9e3609a33f6b84568cbe82b4917ecaff88c40c735b13bf944595b082"} err="failed to get container status \"1fee0a5f9e3609a33f6b84568cbe82b4917ecaff88c40c735b13bf944595b082\": rpc error: code = NotFound desc = could not find container \"1fee0a5f9e3609a33f6b84568cbe82b4917ecaff88c40c735b13bf944595b082\": container with ID starting with 1fee0a5f9e3609a33f6b84568cbe82b4917ecaff88c40c735b13bf944595b082 not found: ID does not exist" Jan 26 00:31:18 crc kubenswrapper[4975]: I0126 00:31:18.693321 4975 scope.go:117] "RemoveContainer" containerID="205450eb7a7a509450ab1df29490ddf958f371427fe10b6310fa90a265b21cb1" Jan 26 00:31:18 crc kubenswrapper[4975]: E0126 00:31:18.694892 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find 
container \"205450eb7a7a509450ab1df29490ddf958f371427fe10b6310fa90a265b21cb1\": container with ID starting with 205450eb7a7a509450ab1df29490ddf958f371427fe10b6310fa90a265b21cb1 not found: ID does not exist" containerID="205450eb7a7a509450ab1df29490ddf958f371427fe10b6310fa90a265b21cb1" Jan 26 00:31:18 crc kubenswrapper[4975]: I0126 00:31:18.695055 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"205450eb7a7a509450ab1df29490ddf958f371427fe10b6310fa90a265b21cb1"} err="failed to get container status \"205450eb7a7a509450ab1df29490ddf958f371427fe10b6310fa90a265b21cb1\": rpc error: code = NotFound desc = could not find container \"205450eb7a7a509450ab1df29490ddf958f371427fe10b6310fa90a265b21cb1\": container with ID starting with 205450eb7a7a509450ab1df29490ddf958f371427fe10b6310fa90a265b21cb1 not found: ID does not exist" Jan 26 00:31:18 crc kubenswrapper[4975]: I0126 00:31:18.695091 4975 scope.go:117] "RemoveContainer" containerID="07fa949c5a79991aa0a588fd77b46a9454c8f7b238e5e7ccad8cf8b0ea37864e" Jan 26 00:31:18 crc kubenswrapper[4975]: E0126 00:31:18.695571 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07fa949c5a79991aa0a588fd77b46a9454c8f7b238e5e7ccad8cf8b0ea37864e\": container with ID starting with 07fa949c5a79991aa0a588fd77b46a9454c8f7b238e5e7ccad8cf8b0ea37864e not found: ID does not exist" containerID="07fa949c5a79991aa0a588fd77b46a9454c8f7b238e5e7ccad8cf8b0ea37864e" Jan 26 00:31:18 crc kubenswrapper[4975]: I0126 00:31:18.695596 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07fa949c5a79991aa0a588fd77b46a9454c8f7b238e5e7ccad8cf8b0ea37864e"} err="failed to get container status \"07fa949c5a79991aa0a588fd77b46a9454c8f7b238e5e7ccad8cf8b0ea37864e\": rpc error: code = NotFound desc = could not find container \"07fa949c5a79991aa0a588fd77b46a9454c8f7b238e5e7ccad8cf8b0ea37864e\": container with ID starting with 07fa949c5a79991aa0a588fd77b46a9454c8f7b238e5e7ccad8cf8b0ea37864e not found: ID does not exist" Jan 26 00:31:18 crc kubenswrapper[4975]: I0126 00:31:18.708688 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wz8ww"] Jan 26 00:31:18 crc kubenswrapper[4975]: I0126 00:31:18.715421 4975 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-wz8ww"] Jan 26 00:31:20 crc kubenswrapper[4975]: I0126 00:31:20.156717 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b25b5ae-cd82-4ff1-9c89-08464c53180b" path="/var/lib/kubelet/pods/3b25b5ae-cd82-4ff1-9c89-08464c53180b/volumes" Jan 26 00:31:40 crc kubenswrapper[4975]: I0126 00:31:40.481271 4975 patch_prober.go:28] interesting pod/machine-config-daemon-f42fk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 00:31:40 crc kubenswrapper[4975]: I0126 00:31:40.481914 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 00:31:45 crc kubenswrapper[4975]: I0126 00:31:45.397591 4975 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-cctnf_e553fc00-cc5f-41e7-aeae-99be7ec861d4/control-plane-machine-set-operator/0.log" Jan 26 00:31:45 crc kubenswrapper[4975]: I0126 00:31:45.546263 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-xh2mk_be0d8f61-b7b4-48cf-a6cb-6780df7d99d6/kube-rbac-proxy/0.log" Jan 26 00:31:45 crc kubenswrapper[4975]: I0126 00:31:45.591854 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-xh2mk_be0d8f61-b7b4-48cf-a6cb-6780df7d99d6/machine-api-operator/0.log" Jan 26 00:31:59 crc kubenswrapper[4975]: I0126 00:31:59.086406 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-r2hdr_1e64f1df-24b4-4587-8d7e-9c79667d3575/cert-manager-controller/0.log" Jan 26 00:31:59 crc kubenswrapper[4975]: I0126 00:31:59.287640 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-x2vvk_44e30ae9-4d62-4cdd-92c9-5e20b05bec04/cert-manager-cainjector/0.log" Jan 26 00:31:59 crc kubenswrapper[4975]: I0126 00:31:59.320348 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-4lbtg_a9fa8b62-74b2-4068-b94e-1968a498a379/cert-manager-webhook/0.log" Jan 26 00:32:10 crc kubenswrapper[4975]: I0126 00:32:10.481708 4975 patch_prober.go:28] interesting pod/machine-config-daemon-f42fk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 00:32:10 crc kubenswrapper[4975]: I0126 00:32:10.483637 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 00:32:13 crc kubenswrapper[4975]: I0126 00:32:13.901450 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-cps6q_9523511a-d66c-42d3-86b2-5572b83ed21e/prometheus-operator/0.log" Jan 26 00:32:14 crc kubenswrapper[4975]: I0126 00:32:14.016351 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-59b48b699b-ltxgv_f01df3f6-766e-45bc-b28b-5fbf18581625/prometheus-operator-admission-webhook/0.log" Jan 26 00:32:14 crc kubenswrapper[4975]: I0126 00:32:14.076886 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n_f403cd64-91dd-45f8-b0b0-981505389e7a/prometheus-operator-admission-webhook/0.log" Jan 26 00:32:14 crc kubenswrapper[4975]: I0126 00:32:14.207078 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-gbqn9_47b2b3cb-f050-438c-b2d4-2ed6b594fad9/operator/0.log" Jan 26 00:32:14 crc kubenswrapper[4975]: I0126 00:32:14.330014 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-6tfl5_93742037-1757-4c1c-b40d-c1e6065bdf8c/perses-operator/0.log" Jan 26 00:32:28 crc kubenswrapper[4975]: I0126 00:32:28.134574 4975 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6_e77f2296-579d-4d32-ad8d-d667b1350d50/util/0.log" Jan 26 00:32:28 crc kubenswrapper[4975]: I0126 00:32:28.343966 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6_e77f2296-579d-4d32-ad8d-d667b1350d50/pull/0.log" Jan 26 00:32:28 crc kubenswrapper[4975]: I0126 00:32:28.344765 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6_e77f2296-579d-4d32-ad8d-d667b1350d50/util/0.log" Jan 26 00:32:28 crc kubenswrapper[4975]: I0126 00:32:28.366108 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6_e77f2296-579d-4d32-ad8d-d667b1350d50/pull/0.log" Jan 26 00:32:28 crc kubenswrapper[4975]: I0126 00:32:28.579886 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6_e77f2296-579d-4d32-ad8d-d667b1350d50/extract/0.log" Jan 26 00:32:28 crc kubenswrapper[4975]: I0126 00:32:28.608421 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6_e77f2296-579d-4d32-ad8d-d667b1350d50/util/0.log" Jan 26 00:32:28 crc kubenswrapper[4975]: I0126 00:32:28.612352 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931ajv8c6_e77f2296-579d-4d32-ad8d-d667b1350d50/pull/0.log" Jan 26 00:32:28 crc kubenswrapper[4975]: I0126 00:32:28.787441 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495_56e34236-a062-4605-8a38-53bb6c213c74/util/0.log" Jan 26 00:32:28 crc kubenswrapper[4975]: I0126 00:32:28.933932 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495_56e34236-a062-4605-8a38-53bb6c213c74/util/0.log" Jan 26 00:32:28 crc kubenswrapper[4975]: I0126 00:32:28.942765 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495_56e34236-a062-4605-8a38-53bb6c213c74/pull/0.log" Jan 26 00:32:28 crc kubenswrapper[4975]: I0126 00:32:28.973917 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495_56e34236-a062-4605-8a38-53bb6c213c74/pull/0.log" Jan 26 00:32:29 crc kubenswrapper[4975]: I0126 00:32:29.135885 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495_56e34236-a062-4605-8a38-53bb6c213c74/util/0.log" Jan 26 00:32:29 crc kubenswrapper[4975]: I0126 00:32:29.144695 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495_56e34236-a062-4605-8a38-53bb6c213c74/extract/0.log" Jan 26 00:32:29 crc kubenswrapper[4975]: I0126 00:32:29.145167 4975 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8fgj495_56e34236-a062-4605-8a38-53bb6c213c74/pull/0.log" Jan 26 00:32:29 crc kubenswrapper[4975]: I0126 00:32:29.284763 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc_dd41d02b-525a-4ffd-ace0-ba6fde8853e2/util/0.log" Jan 26 00:32:29 crc kubenswrapper[4975]: I0126 00:32:29.525145 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc_dd41d02b-525a-4ffd-ace0-ba6fde8853e2/util/0.log" Jan 26 00:32:29 crc kubenswrapper[4975]: I0126 00:32:29.532354 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc_dd41d02b-525a-4ffd-ace0-ba6fde8853e2/pull/0.log" Jan 26 00:32:29 crc kubenswrapper[4975]: I0126 00:32:29.542237 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc_dd41d02b-525a-4ffd-ace0-ba6fde8853e2/pull/0.log" Jan 26 00:32:29 crc kubenswrapper[4975]: I0126 00:32:29.705631 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc_dd41d02b-525a-4ffd-ace0-ba6fde8853e2/util/0.log" Jan 26 00:32:29 crc kubenswrapper[4975]: I0126 00:32:29.746563 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc_dd41d02b-525a-4ffd-ace0-ba6fde8853e2/pull/0.log" Jan 26 00:32:29 crc kubenswrapper[4975]: I0126 00:32:29.814492 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5ehncjc_dd41d02b-525a-4ffd-ace0-ba6fde8853e2/extract/0.log" Jan 26 00:32:29 crc kubenswrapper[4975]: I0126 00:32:29.871189 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww_4cb0fa5f-6596-49f6-8925-d4c56b43a433/util/0.log" Jan 26 00:32:30 crc kubenswrapper[4975]: I0126 00:32:30.054430 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww_4cb0fa5f-6596-49f6-8925-d4c56b43a433/pull/0.log" Jan 26 00:32:30 crc kubenswrapper[4975]: I0126 00:32:30.060486 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww_4cb0fa5f-6596-49f6-8925-d4c56b43a433/util/0.log" Jan 26 00:32:30 crc kubenswrapper[4975]: I0126 00:32:30.063516 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww_4cb0fa5f-6596-49f6-8925-d4c56b43a433/pull/0.log" Jan 26 00:32:30 crc kubenswrapper[4975]: I0126 00:32:30.226720 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww_4cb0fa5f-6596-49f6-8925-d4c56b43a433/util/0.log" Jan 26 00:32:30 crc kubenswrapper[4975]: I0126 00:32:30.232757 4975 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww_4cb0fa5f-6596-49f6-8925-d4c56b43a433/pull/0.log" Jan 26 00:32:30 crc kubenswrapper[4975]: I0126 00:32:30.242099 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f08dkpww_4cb0fa5f-6596-49f6-8925-d4c56b43a433/extract/0.log" Jan 26 00:32:30 crc kubenswrapper[4975]: I0126 00:32:30.422191 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jj6v7_2d3abc18-8fc6-4108-8d6e-b9268064a682/extract-utilities/0.log" Jan 26 00:32:30 crc kubenswrapper[4975]: I0126 00:32:30.557907 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jj6v7_2d3abc18-8fc6-4108-8d6e-b9268064a682/extract-content/0.log" Jan 26 00:32:30 crc kubenswrapper[4975]: I0126 00:32:30.572306 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jj6v7_2d3abc18-8fc6-4108-8d6e-b9268064a682/extract-content/0.log" Jan 26 00:32:30 crc kubenswrapper[4975]: I0126 00:32:30.576633 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jj6v7_2d3abc18-8fc6-4108-8d6e-b9268064a682/extract-utilities/0.log" Jan 26 00:32:30 crc kubenswrapper[4975]: I0126 00:32:30.863713 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jj6v7_2d3abc18-8fc6-4108-8d6e-b9268064a682/extract-utilities/0.log" Jan 26 00:32:30 crc kubenswrapper[4975]: I0126 00:32:30.914982 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jj6v7_2d3abc18-8fc6-4108-8d6e-b9268064a682/extract-content/0.log" Jan 26 00:32:31 crc kubenswrapper[4975]: I0126 00:32:31.152355 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xxssn_b81873b5-3563-4628-9cea-2e3837b4038c/extract-utilities/0.log" Jan 26 00:32:31 crc kubenswrapper[4975]: I0126 00:32:31.163412 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jj6v7_2d3abc18-8fc6-4108-8d6e-b9268064a682/registry-server/0.log" Jan 26 00:32:31 crc kubenswrapper[4975]: I0126 00:32:31.317661 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xxssn_b81873b5-3563-4628-9cea-2e3837b4038c/extract-utilities/0.log" Jan 26 00:32:31 crc kubenswrapper[4975]: I0126 00:32:31.384487 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xxssn_b81873b5-3563-4628-9cea-2e3837b4038c/extract-content/0.log" Jan 26 00:32:31 crc kubenswrapper[4975]: I0126 00:32:31.384496 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xxssn_b81873b5-3563-4628-9cea-2e3837b4038c/extract-content/0.log" Jan 26 00:32:31 crc kubenswrapper[4975]: I0126 00:32:31.547452 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xxssn_b81873b5-3563-4628-9cea-2e3837b4038c/extract-content/0.log" Jan 26 00:32:31 crc kubenswrapper[4975]: I0126 00:32:31.570926 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xxssn_b81873b5-3563-4628-9cea-2e3837b4038c/extract-utilities/0.log" Jan 26 00:32:31 crc kubenswrapper[4975]: I0126 00:32:31.789924 4975 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-kvfbr_b31428e9-0e62-40f7-b81f-96e44e63b0b5/marketplace-operator/0.log" Jan 26 00:32:31 crc kubenswrapper[4975]: I0126 00:32:31.843296 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9xnqv_148781fe-d5ca-4956-822b-0bf9b8ba18d2/extract-utilities/0.log" Jan 26 00:32:31 crc kubenswrapper[4975]: I0126 00:32:31.847795 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-xxssn_b81873b5-3563-4628-9cea-2e3837b4038c/registry-server/0.log" Jan 26 00:32:32 crc kubenswrapper[4975]: I0126 00:32:32.025027 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9xnqv_148781fe-d5ca-4956-822b-0bf9b8ba18d2/extract-content/0.log" Jan 26 00:32:32 crc kubenswrapper[4975]: I0126 00:32:32.034223 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9xnqv_148781fe-d5ca-4956-822b-0bf9b8ba18d2/extract-utilities/0.log" Jan 26 00:32:32 crc kubenswrapper[4975]: I0126 00:32:32.038394 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9xnqv_148781fe-d5ca-4956-822b-0bf9b8ba18d2/extract-content/0.log" Jan 26 00:32:32 crc kubenswrapper[4975]: I0126 00:32:32.215868 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9xnqv_148781fe-d5ca-4956-822b-0bf9b8ba18d2/extract-content/0.log" Jan 26 00:32:32 crc kubenswrapper[4975]: I0126 00:32:32.217019 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9xnqv_148781fe-d5ca-4956-822b-0bf9b8ba18d2/extract-utilities/0.log" Jan 26 00:32:32 crc kubenswrapper[4975]: I0126 00:32:32.476403 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9xnqv_148781fe-d5ca-4956-822b-0bf9b8ba18d2/registry-server/0.log" Jan 26 00:32:40 crc kubenswrapper[4975]: I0126 00:32:40.481190 4975 patch_prober.go:28] interesting pod/machine-config-daemon-f42fk container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 26 00:32:40 crc kubenswrapper[4975]: I0126 00:32:40.481891 4975 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 26 00:32:40 crc kubenswrapper[4975]: I0126 00:32:40.481957 4975 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" Jan 26 00:32:40 crc kubenswrapper[4975]: I0126 00:32:40.482849 4975 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4fcb34786e631c61fc6a733cd4fad4abcb9508bd15cfc37254e429a1a83a2060"} pod="openshift-machine-config-operator/machine-config-daemon-f42fk" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 26 00:32:40 crc kubenswrapper[4975]: I0126 00:32:40.482924 4975 kuberuntime_container.go:808] "Killing container 
with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerName="machine-config-daemon" containerID="cri-o://4fcb34786e631c61fc6a733cd4fad4abcb9508bd15cfc37254e429a1a83a2060" gracePeriod=600 Jan 26 00:32:40 crc kubenswrapper[4975]: E0126 00:32:40.627982 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f42fk_openshift-machine-config-operator(b76c31fb-14ea-4b49-8a41-0b2731967b86)\"" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" Jan 26 00:32:41 crc kubenswrapper[4975]: I0126 00:32:41.327241 4975 generic.go:334] "Generic (PLEG): container finished" podID="b76c31fb-14ea-4b49-8a41-0b2731967b86" containerID="4fcb34786e631c61fc6a733cd4fad4abcb9508bd15cfc37254e429a1a83a2060" exitCode=0 Jan 26 00:32:41 crc kubenswrapper[4975]: I0126 00:32:41.327297 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" event={"ID":"b76c31fb-14ea-4b49-8a41-0b2731967b86","Type":"ContainerDied","Data":"4fcb34786e631c61fc6a733cd4fad4abcb9508bd15cfc37254e429a1a83a2060"} Jan 26 00:32:41 crc kubenswrapper[4975]: I0126 00:32:41.327349 4975 scope.go:117] "RemoveContainer" containerID="950074c352f727e22ecb9292041c52e5cee133377c9a49581baee4c6166e42f5" Jan 26 00:32:41 crc kubenswrapper[4975]: I0126 00:32:41.327977 4975 scope.go:117] "RemoveContainer" containerID="4fcb34786e631c61fc6a733cd4fad4abcb9508bd15cfc37254e429a1a83a2060" Jan 26 00:32:41 crc kubenswrapper[4975]: E0126 00:32:41.328309 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f42fk_openshift-machine-config-operator(b76c31fb-14ea-4b49-8a41-0b2731967b86)\"" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" Jan 26 00:32:44 crc kubenswrapper[4975]: I0126 00:32:44.472597 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-cps6q_9523511a-d66c-42d3-86b2-5572b83ed21e/prometheus-operator/0.log" Jan 26 00:32:44 crc kubenswrapper[4975]: I0126 00:32:44.477029 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-59b48b699b-ltxgv_f01df3f6-766e-45bc-b28b-5fbf18581625/prometheus-operator-admission-webhook/0.log" Jan 26 00:32:44 crc kubenswrapper[4975]: I0126 00:32:44.526077 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-59b48b699b-z5v4n_f403cd64-91dd-45f8-b0b0-981505389e7a/prometheus-operator-admission-webhook/0.log" Jan 26 00:32:44 crc kubenswrapper[4975]: I0126 00:32:44.651496 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-gbqn9_47b2b3cb-f050-438c-b2d4-2ed6b594fad9/operator/0.log" Jan 26 00:32:44 crc kubenswrapper[4975]: I0126 00:32:44.687435 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-6tfl5_93742037-1757-4c1c-b40d-c1e6065bdf8c/perses-operator/0.log" Jan 26 00:32:53 crc 
kubenswrapper[4975]: I0126 00:32:53.148165 4975 scope.go:117] "RemoveContainer" containerID="4fcb34786e631c61fc6a733cd4fad4abcb9508bd15cfc37254e429a1a83a2060" Jan 26 00:32:53 crc kubenswrapper[4975]: E0126 00:32:53.149275 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f42fk_openshift-machine-config-operator(b76c31fb-14ea-4b49-8a41-0b2731967b86)\"" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" Jan 26 00:33:08 crc kubenswrapper[4975]: I0126 00:33:08.147640 4975 scope.go:117] "RemoveContainer" containerID="4fcb34786e631c61fc6a733cd4fad4abcb9508bd15cfc37254e429a1a83a2060" Jan 26 00:33:08 crc kubenswrapper[4975]: E0126 00:33:08.148660 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f42fk_openshift-machine-config-operator(b76c31fb-14ea-4b49-8a41-0b2731967b86)\"" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" Jan 26 00:33:22 crc kubenswrapper[4975]: I0126 00:33:22.150688 4975 scope.go:117] "RemoveContainer" containerID="4fcb34786e631c61fc6a733cd4fad4abcb9508bd15cfc37254e429a1a83a2060" Jan 26 00:33:22 crc kubenswrapper[4975]: E0126 00:33:22.151447 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f42fk_openshift-machine-config-operator(b76c31fb-14ea-4b49-8a41-0b2731967b86)\"" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" Jan 26 00:33:33 crc kubenswrapper[4975]: I0126 00:33:33.147488 4975 scope.go:117] "RemoveContainer" containerID="4fcb34786e631c61fc6a733cd4fad4abcb9508bd15cfc37254e429a1a83a2060" Jan 26 00:33:33 crc kubenswrapper[4975]: E0126 00:33:33.148176 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f42fk_openshift-machine-config-operator(b76c31fb-14ea-4b49-8a41-0b2731967b86)\"" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" Jan 26 00:33:42 crc kubenswrapper[4975]: I0126 00:33:42.811636 4975 generic.go:334] "Generic (PLEG): container finished" podID="ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a" containerID="8016cb4de5d33a349678ea6e7049461e530750f12da1683fbead9a8eb8473234" exitCode=0 Jan 26 00:33:42 crc kubenswrapper[4975]: I0126 00:33:42.811794 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-nvgnd/must-gather-qnpzx" event={"ID":"ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a","Type":"ContainerDied","Data":"8016cb4de5d33a349678ea6e7049461e530750f12da1683fbead9a8eb8473234"} Jan 26 00:33:42 crc kubenswrapper[4975]: I0126 00:33:42.812749 4975 scope.go:117] "RemoveContainer" containerID="8016cb4de5d33a349678ea6e7049461e530750f12da1683fbead9a8eb8473234" Jan 26 00:33:43 crc kubenswrapper[4975]: I0126 00:33:43.358238 4975 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-must-gather-nvgnd_must-gather-qnpzx_ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a/gather/0.log" Jan 26 00:33:47 crc kubenswrapper[4975]: I0126 00:33:47.148192 4975 scope.go:117] "RemoveContainer" containerID="4fcb34786e631c61fc6a733cd4fad4abcb9508bd15cfc37254e429a1a83a2060" Jan 26 00:33:47 crc kubenswrapper[4975]: E0126 00:33:47.149190 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f42fk_openshift-machine-config-operator(b76c31fb-14ea-4b49-8a41-0b2731967b86)\"" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" Jan 26 00:33:50 crc kubenswrapper[4975]: I0126 00:33:50.069145 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-nvgnd/must-gather-qnpzx"] Jan 26 00:33:50 crc kubenswrapper[4975]: I0126 00:33:50.070136 4975 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-nvgnd/must-gather-qnpzx" podUID="ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a" containerName="copy" containerID="cri-o://c82a3de702ce97c24e45849f239fbdad1e8a78d3b3cc42af7efea41a6c797d83" gracePeriod=2 Jan 26 00:33:50 crc kubenswrapper[4975]: I0126 00:33:50.088542 4975 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-nvgnd/must-gather-qnpzx"] Jan 26 00:33:50 crc kubenswrapper[4975]: I0126 00:33:50.465154 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-nvgnd_must-gather-qnpzx_ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a/copy/0.log" Jan 26 00:33:50 crc kubenswrapper[4975]: I0126 00:33:50.466143 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nvgnd/must-gather-qnpzx" Jan 26 00:33:50 crc kubenswrapper[4975]: I0126 00:33:50.586120 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a-must-gather-output\") pod \"ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a\" (UID: \"ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a\") " Jan 26 00:33:50 crc kubenswrapper[4975]: I0126 00:33:50.586248 4975 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gnzj2\" (UniqueName: \"kubernetes.io/projected/ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a-kube-api-access-gnzj2\") pod \"ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a\" (UID: \"ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a\") " Jan 26 00:33:50 crc kubenswrapper[4975]: I0126 00:33:50.592622 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a-kube-api-access-gnzj2" (OuterVolumeSpecName: "kube-api-access-gnzj2") pod "ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a" (UID: "ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a"). InnerVolumeSpecName "kube-api-access-gnzj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 26 00:33:50 crc kubenswrapper[4975]: I0126 00:33:50.640477 4975 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a" (UID: "ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 26 00:33:50 crc kubenswrapper[4975]: I0126 00:33:50.688178 4975 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a-must-gather-output\") on node \"crc\" DevicePath \"\"" Jan 26 00:33:50 crc kubenswrapper[4975]: I0126 00:33:50.688227 4975 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gnzj2\" (UniqueName: \"kubernetes.io/projected/ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a-kube-api-access-gnzj2\") on node \"crc\" DevicePath \"\"" Jan 26 00:33:50 crc kubenswrapper[4975]: I0126 00:33:50.874060 4975 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-nvgnd_must-gather-qnpzx_ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a/copy/0.log" Jan 26 00:33:50 crc kubenswrapper[4975]: I0126 00:33:50.874676 4975 generic.go:334] "Generic (PLEG): container finished" podID="ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a" containerID="c82a3de702ce97c24e45849f239fbdad1e8a78d3b3cc42af7efea41a6c797d83" exitCode=143 Jan 26 00:33:50 crc kubenswrapper[4975]: I0126 00:33:50.874755 4975 scope.go:117] "RemoveContainer" containerID="c82a3de702ce97c24e45849f239fbdad1e8a78d3b3cc42af7efea41a6c797d83" Jan 26 00:33:50 crc kubenswrapper[4975]: I0126 00:33:50.874921 4975 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-nvgnd/must-gather-qnpzx" Jan 26 00:33:50 crc kubenswrapper[4975]: I0126 00:33:50.915403 4975 scope.go:117] "RemoveContainer" containerID="8016cb4de5d33a349678ea6e7049461e530750f12da1683fbead9a8eb8473234" Jan 26 00:33:50 crc kubenswrapper[4975]: E0126 00:33:50.932175 4975 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podef8bcd63_a3f9_40c4_ad5e_f477aa61d34a.slice\": RecentStats: unable to find data in memory cache]" Jan 26 00:33:50 crc kubenswrapper[4975]: I0126 00:33:50.960663 4975 scope.go:117] "RemoveContainer" containerID="c82a3de702ce97c24e45849f239fbdad1e8a78d3b3cc42af7efea41a6c797d83" Jan 26 00:33:50 crc kubenswrapper[4975]: E0126 00:33:50.964868 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c82a3de702ce97c24e45849f239fbdad1e8a78d3b3cc42af7efea41a6c797d83\": container with ID starting with c82a3de702ce97c24e45849f239fbdad1e8a78d3b3cc42af7efea41a6c797d83 not found: ID does not exist" containerID="c82a3de702ce97c24e45849f239fbdad1e8a78d3b3cc42af7efea41a6c797d83" Jan 26 00:33:50 crc kubenswrapper[4975]: I0126 00:33:50.965646 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c82a3de702ce97c24e45849f239fbdad1e8a78d3b3cc42af7efea41a6c797d83"} err="failed to get container status \"c82a3de702ce97c24e45849f239fbdad1e8a78d3b3cc42af7efea41a6c797d83\": rpc error: code = NotFound desc = could not find container \"c82a3de702ce97c24e45849f239fbdad1e8a78d3b3cc42af7efea41a6c797d83\": container with ID starting with c82a3de702ce97c24e45849f239fbdad1e8a78d3b3cc42af7efea41a6c797d83 not found: ID does not exist" Jan 26 00:33:50 crc kubenswrapper[4975]: I0126 00:33:50.965683 4975 scope.go:117] "RemoveContainer" containerID="8016cb4de5d33a349678ea6e7049461e530750f12da1683fbead9a8eb8473234" Jan 26 00:33:50 crc kubenswrapper[4975]: E0126 00:33:50.969831 4975 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = 
NotFound desc = could not find container \"8016cb4de5d33a349678ea6e7049461e530750f12da1683fbead9a8eb8473234\": container with ID starting with 8016cb4de5d33a349678ea6e7049461e530750f12da1683fbead9a8eb8473234 not found: ID does not exist" containerID="8016cb4de5d33a349678ea6e7049461e530750f12da1683fbead9a8eb8473234" Jan 26 00:33:50 crc kubenswrapper[4975]: I0126 00:33:50.969873 4975 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8016cb4de5d33a349678ea6e7049461e530750f12da1683fbead9a8eb8473234"} err="failed to get container status \"8016cb4de5d33a349678ea6e7049461e530750f12da1683fbead9a8eb8473234\": rpc error: code = NotFound desc = could not find container \"8016cb4de5d33a349678ea6e7049461e530750f12da1683fbead9a8eb8473234\": container with ID starting with 8016cb4de5d33a349678ea6e7049461e530750f12da1683fbead9a8eb8473234 not found: ID does not exist" Jan 26 00:33:52 crc kubenswrapper[4975]: I0126 00:33:52.158322 4975 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a" path="/var/lib/kubelet/pods/ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a/volumes" Jan 26 00:34:02 crc kubenswrapper[4975]: I0126 00:34:02.148000 4975 scope.go:117] "RemoveContainer" containerID="4fcb34786e631c61fc6a733cd4fad4abcb9508bd15cfc37254e429a1a83a2060" Jan 26 00:34:02 crc kubenswrapper[4975]: E0126 00:34:02.148950 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f42fk_openshift-machine-config-operator(b76c31fb-14ea-4b49-8a41-0b2731967b86)\"" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" Jan 26 00:34:16 crc kubenswrapper[4975]: I0126 00:34:16.154321 4975 scope.go:117] "RemoveContainer" containerID="4fcb34786e631c61fc6a733cd4fad4abcb9508bd15cfc37254e429a1a83a2060" Jan 26 00:34:16 crc kubenswrapper[4975]: E0126 00:34:16.155271 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f42fk_openshift-machine-config-operator(b76c31fb-14ea-4b49-8a41-0b2731967b86)\"" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" Jan 26 00:34:30 crc kubenswrapper[4975]: I0126 00:34:30.147869 4975 scope.go:117] "RemoveContainer" containerID="4fcb34786e631c61fc6a733cd4fad4abcb9508bd15cfc37254e429a1a83a2060" Jan 26 00:34:30 crc kubenswrapper[4975]: E0126 00:34:30.148640 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f42fk_openshift-machine-config-operator(b76c31fb-14ea-4b49-8a41-0b2731967b86)\"" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" Jan 26 00:34:45 crc kubenswrapper[4975]: I0126 00:34:45.147980 4975 scope.go:117] "RemoveContainer" containerID="4fcb34786e631c61fc6a733cd4fad4abcb9508bd15cfc37254e429a1a83a2060" Jan 26 00:34:45 crc kubenswrapper[4975]: E0126 00:34:45.148808 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f42fk_openshift-machine-config-operator(b76c31fb-14ea-4b49-8a41-0b2731967b86)\"" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" Jan 26 00:34:56 crc kubenswrapper[4975]: I0126 00:34:56.158667 4975 scope.go:117] "RemoveContainer" containerID="4fcb34786e631c61fc6a733cd4fad4abcb9508bd15cfc37254e429a1a83a2060" Jan 26 00:34:56 crc kubenswrapper[4975]: E0126 00:34:56.159964 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f42fk_openshift-machine-config-operator(b76c31fb-14ea-4b49-8a41-0b2731967b86)\"" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" Jan 26 00:35:09 crc kubenswrapper[4975]: I0126 00:35:09.147013 4975 scope.go:117] "RemoveContainer" containerID="4fcb34786e631c61fc6a733cd4fad4abcb9508bd15cfc37254e429a1a83a2060" Jan 26 00:35:09 crc kubenswrapper[4975]: E0126 00:35:09.148006 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f42fk_openshift-machine-config-operator(b76c31fb-14ea-4b49-8a41-0b2731967b86)\"" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" Jan 26 00:35:23 crc kubenswrapper[4975]: I0126 00:35:23.147335 4975 scope.go:117] "RemoveContainer" containerID="4fcb34786e631c61fc6a733cd4fad4abcb9508bd15cfc37254e429a1a83a2060" Jan 26 00:35:23 crc kubenswrapper[4975]: E0126 00:35:23.148057 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f42fk_openshift-machine-config-operator(b76c31fb-14ea-4b49-8a41-0b2731967b86)\"" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" Jan 26 00:35:38 crc kubenswrapper[4975]: I0126 00:35:38.147633 4975 scope.go:117] "RemoveContainer" containerID="4fcb34786e631c61fc6a733cd4fad4abcb9508bd15cfc37254e429a1a83a2060" Jan 26 00:35:38 crc kubenswrapper[4975]: E0126 00:35:38.148514 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f42fk_openshift-machine-config-operator(b76c31fb-14ea-4b49-8a41-0b2731967b86)\"" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" Jan 26 00:35:50 crc kubenswrapper[4975]: I0126 00:35:50.147995 4975 scope.go:117] "RemoveContainer" containerID="4fcb34786e631c61fc6a733cd4fad4abcb9508bd15cfc37254e429a1a83a2060" Jan 26 00:35:50 crc kubenswrapper[4975]: E0126 00:35:50.150872 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f42fk_openshift-machine-config-operator(b76c31fb-14ea-4b49-8a41-0b2731967b86)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" Jan 26 00:35:57 crc kubenswrapper[4975]: I0126 00:35:57.144693 4975 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/infrawatch-operators-5tswz"] Jan 26 00:35:57 crc kubenswrapper[4975]: E0126 00:35:57.146849 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14333534-4a72-4694-bd91-e253c1cc8e63" containerName="extract-utilities" Jan 26 00:35:57 crc kubenswrapper[4975]: I0126 00:35:57.146873 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="14333534-4a72-4694-bd91-e253c1cc8e63" containerName="extract-utilities" Jan 26 00:35:57 crc kubenswrapper[4975]: E0126 00:35:57.146934 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b25b5ae-cd82-4ff1-9c89-08464c53180b" containerName="extract-content" Jan 26 00:35:57 crc kubenswrapper[4975]: I0126 00:35:57.146950 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b25b5ae-cd82-4ff1-9c89-08464c53180b" containerName="extract-content" Jan 26 00:35:57 crc kubenswrapper[4975]: E0126 00:35:57.146975 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14333534-4a72-4694-bd91-e253c1cc8e63" containerName="extract-content" Jan 26 00:35:57 crc kubenswrapper[4975]: I0126 00:35:57.146987 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="14333534-4a72-4694-bd91-e253c1cc8e63" containerName="extract-content" Jan 26 00:35:57 crc kubenswrapper[4975]: E0126 00:35:57.147052 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a" containerName="gather" Jan 26 00:35:57 crc kubenswrapper[4975]: I0126 00:35:57.147065 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a" containerName="gather" Jan 26 00:35:57 crc kubenswrapper[4975]: E0126 00:35:57.147123 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a" containerName="copy" Jan 26 00:35:57 crc kubenswrapper[4975]: I0126 00:35:57.147137 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a" containerName="copy" Jan 26 00:35:57 crc kubenswrapper[4975]: E0126 00:35:57.147150 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b25b5ae-cd82-4ff1-9c89-08464c53180b" containerName="extract-utilities" Jan 26 00:35:57 crc kubenswrapper[4975]: I0126 00:35:57.147160 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b25b5ae-cd82-4ff1-9c89-08464c53180b" containerName="extract-utilities" Jan 26 00:35:57 crc kubenswrapper[4975]: E0126 00:35:57.147222 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14333534-4a72-4694-bd91-e253c1cc8e63" containerName="registry-server" Jan 26 00:35:57 crc kubenswrapper[4975]: I0126 00:35:57.147235 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="14333534-4a72-4694-bd91-e253c1cc8e63" containerName="registry-server" Jan 26 00:35:57 crc kubenswrapper[4975]: E0126 00:35:57.147296 4975 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b25b5ae-cd82-4ff1-9c89-08464c53180b" containerName="registry-server" Jan 26 00:35:57 crc kubenswrapper[4975]: I0126 00:35:57.147309 4975 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b25b5ae-cd82-4ff1-9c89-08464c53180b" containerName="registry-server" Jan 26 00:35:57 crc kubenswrapper[4975]: I0126 00:35:57.147789 4975 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="3b25b5ae-cd82-4ff1-9c89-08464c53180b" containerName="registry-server" Jan 26 00:35:57 crc kubenswrapper[4975]: I0126 00:35:57.147849 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a" containerName="copy" Jan 26 00:35:57 crc kubenswrapper[4975]: I0126 00:35:57.147880 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="14333534-4a72-4694-bd91-e253c1cc8e63" containerName="registry-server" Jan 26 00:35:57 crc kubenswrapper[4975]: I0126 00:35:57.147897 4975 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef8bcd63-a3f9-40c4-ad5e-f477aa61d34a" containerName="gather" Jan 26 00:35:57 crc kubenswrapper[4975]: I0126 00:35:57.176418 4975 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-5tswz" Jan 26 00:35:57 crc kubenswrapper[4975]: I0126 00:35:57.185398 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-5tswz"] Jan 26 00:35:57 crc kubenswrapper[4975]: I0126 00:35:57.242775 4975 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svxwb\" (UniqueName: \"kubernetes.io/projected/38371a9b-7702-4347-b66a-04b385dc50e5-kube-api-access-svxwb\") pod \"infrawatch-operators-5tswz\" (UID: \"38371a9b-7702-4347-b66a-04b385dc50e5\") " pod="service-telemetry/infrawatch-operators-5tswz" Jan 26 00:35:57 crc kubenswrapper[4975]: I0126 00:35:57.343973 4975 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svxwb\" (UniqueName: \"kubernetes.io/projected/38371a9b-7702-4347-b66a-04b385dc50e5-kube-api-access-svxwb\") pod \"infrawatch-operators-5tswz\" (UID: \"38371a9b-7702-4347-b66a-04b385dc50e5\") " pod="service-telemetry/infrawatch-operators-5tswz" Jan 26 00:35:57 crc kubenswrapper[4975]: I0126 00:35:57.367726 4975 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svxwb\" (UniqueName: \"kubernetes.io/projected/38371a9b-7702-4347-b66a-04b385dc50e5-kube-api-access-svxwb\") pod \"infrawatch-operators-5tswz\" (UID: \"38371a9b-7702-4347-b66a-04b385dc50e5\") " pod="service-telemetry/infrawatch-operators-5tswz" Jan 26 00:35:57 crc kubenswrapper[4975]: I0126 00:35:57.504081 4975 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-5tswz" Jan 26 00:35:57 crc kubenswrapper[4975]: I0126 00:35:57.940049 4975 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-5tswz"] Jan 26 00:35:57 crc kubenswrapper[4975]: I0126 00:35:57.941996 4975 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 26 00:35:58 crc kubenswrapper[4975]: I0126 00:35:58.322256 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-5tswz" event={"ID":"38371a9b-7702-4347-b66a-04b385dc50e5","Type":"ContainerStarted","Data":"4e749bb38791c5edbd3ca46212ee0a192d3373777b3382d461939b938eaac993"} Jan 26 00:35:58 crc kubenswrapper[4975]: I0126 00:35:58.322643 4975 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-5tswz" event={"ID":"38371a9b-7702-4347-b66a-04b385dc50e5","Type":"ContainerStarted","Data":"2f776fe53447b6a551c321f404296aa30f5314994e541cad4b160aea046f2ed0"} Jan 26 00:35:58 crc kubenswrapper[4975]: I0126 00:35:58.347713 4975 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/infrawatch-operators-5tswz" podStartSLOduration=1.2390016209999999 podStartE2EDuration="1.347692239s" podCreationTimestamp="2026-01-26 00:35:57 +0000 UTC" firstStartedPulling="2026-01-26 00:35:57.941673439 +0000 UTC m=+1742.062878943" lastFinishedPulling="2026-01-26 00:35:58.050364067 +0000 UTC m=+1742.171569561" observedRunningTime="2026-01-26 00:35:58.34171067 +0000 UTC m=+1742.462916164" watchObservedRunningTime="2026-01-26 00:35:58.347692239 +0000 UTC m=+1742.468897733" Jan 26 00:36:05 crc kubenswrapper[4975]: I0126 00:36:05.147939 4975 scope.go:117] "RemoveContainer" containerID="4fcb34786e631c61fc6a733cd4fad4abcb9508bd15cfc37254e429a1a83a2060" Jan 26 00:36:05 crc kubenswrapper[4975]: E0126 00:36:05.148832 4975 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-f42fk_openshift-machine-config-operator(b76c31fb-14ea-4b49-8a41-0b2731967b86)\"" pod="openshift-machine-config-operator/machine-config-daemon-f42fk" podUID="b76c31fb-14ea-4b49-8a41-0b2731967b86" Jan 26 00:36:07 crc kubenswrapper[4975]: I0126 00:36:07.505017 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/infrawatch-operators-5tswz" Jan 26 00:36:07 crc kubenswrapper[4975]: I0126 00:36:07.505426 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/infrawatch-operators-5tswz" Jan 26 00:36:07 crc kubenswrapper[4975]: I0126 00:36:07.545483 4975 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/infrawatch-operators-5tswz" Jan 26 00:36:08 crc kubenswrapper[4975]: I0126 00:36:08.458239 4975 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/infrawatch-operators-5tswz" Jan 26 00:36:08 crc kubenswrapper[4975]: I0126 00:36:08.504384 4975 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/infrawatch-operators-5tswz"]